Mirror of https://github.com/CommunitySolidServer/CommunitySolidServer.git (synced 2024-10-03 14:55:10 +00:00)

refactor: Clean up utility functions

parent 82f3aa0cd8
commit 1073c2ff4c
@@ -11,9 +11,6 @@
"WebAclAuthorizer:_aclManager": {
"@id": "urn:solid-server:default:AclManager"
},
"WebAclAuthorizer:_containerManager": {
"@id": "urn:solid-server:default:UrlContainerManager"
},
"WebAclAuthorizer:_resourceStore": {
"@id": "urn:solid-server:default:ResourceStore_Patching"
}
@@ -52,14 +52,6 @@
"RepresentationConvertingStore:_outConverter": {
"@id": "urn:solid-server:default:RepresentationConverter"
}
},

{
"@id": "urn:solid-server:default:UrlContainerManager",
"@type": "UrlContainerManager",
"UrlContainerManager:_base": {
"@id": "urn:solid-server:default:variable:base"
}
}
]
}
@@ -1,10 +1,6 @@
{
"@context": "https://linkedsoftwaredependencies.org/bundles/npm/@solid/community-server/^1.0.0/components/context.jsonld",
"@graph": [
{
"@id": "urn:solid-server:default:MetadataController",
"@type": "MetadataController"
},
{
"@id": "urn:solid-server:default:FileIdentifierMapper",
"@type": "ExtensionBasedMapper",
@@ -22,9 +18,6 @@
"@type": "FileDataAccessor",
"FileDataAccessor:_resourceMapper": {
"@id": "urn:solid-server:default:FileIdentifierMapper"
},
"FileDataAccessor:_metadataController": {
"@id": "urn:solid-server:default:MetadataController"
}
},
{
@@ -35,12 +28,6 @@
},
"DataAccessorBasedStore:_base": {
"@id": "urn:solid-server:default:variable:base"
},
"DataAccessorBasedStore:_metadataController": {
"@id": "urn:solid-server:default:MetadataController"
},
"DataAccessorBasedStore:_containerManager": {
"@id": "urn:solid-server:default:UrlContainerManager"
}
}
]
@@ -1,18 +1,11 @@
{
"@context": "https://linkedsoftwaredependencies.org/bundles/npm/@solid/community-server/^1.0.0/components/context.jsonld",
"@graph": [
{
"@id": "urn:solid-server:default:MetadataController",
"@type": "MetadataController"
},
{
"@id": "urn:solid-server:default:MemoryDataAccessor",
"@type": "InMemoryDataAccessor",
"InMemoryDataAccessor:_base": {
"@id": "urn:solid-server:default:variable:base"
},
"InMemoryDataAccessor:_metadataController": {
"@id": "urn:solid-server:default:MetadataController"
}
},
{
@@ -23,12 +16,6 @@
},
"DataAccessorBasedStore:_base": {
"@id": "urn:solid-server:default:variable:base"
},
"DataAccessorBasedStore:_metadataController": {
"@id": "urn:solid-server:default:MetadataController"
},
"DataAccessorBasedStore:_containerManager": {
"@id": "urn:solid-server:default:UrlContainerManager"
}
}
]
@@ -1,11 +1,6 @@
{
"@context": "https://linkedsoftwaredependencies.org/bundles/npm/@solid/community-server/^1.0.0/components/context.jsonld",
"@graph": [
{
"@id": "urn:solid-server:default:MetadataController",
"@type": "MetadataController"
},

{
"@id": "urn:solid-server:default:SparqlDataAccessor",
"@type": "SparqlDataAccessor",
@@ -14,12 +9,6 @@
},
"SparqlDataAccessor:_base": {
"@id": "urn:solid-server:default:variable:base"
},
"SparqlDataAccessor:_containerManager": {
"@id": "urn:solid-server:default:UrlContainerManager"
},
"SparqlDataAccessor:_metadataController": {
"@id": "urn:solid-server:default:MetadataController"
}
},

@@ -31,12 +20,6 @@
},
"DataAccessorBasedStore:_base": {
"@id": "urn:solid-server:default:variable:base"
},
"DataAccessorBasedStore:_metadataController": {
"@id": "urn:solid-server:default:MetadataController"
},
"DataAccessorBasedStore:_containerManager": {
"@id": "urn:solid-server:default:UrlContainerManager"
}
},
index.ts (7 lines changed)
@@ -115,7 +115,6 @@ export * from './src/storage/routing/RouterRule';
// Storage
export * from './src/storage/AtomicResourceStore';
export * from './src/storage/Conditions';
export * from './src/storage/ContainerManager';
export * from './src/storage/DataAccessorBasedStore';
export * from './src/storage/ExtensionBasedMapper';
export * from './src/storage/FileIdentifierMapper';
@@ -128,7 +127,6 @@ export * from './src/storage/ResourceLocker';
export * from './src/storage/ResourceStore';
export * from './src/storage/RoutingResourceStore';
export * from './src/storage/SingleThreadedResourceLocker';
export * from './src/storage/UrlContainerManager';
export * from './src/storage/WrappedExpiringResourceLocker';

// Util/Errors
@@ -147,5 +145,6 @@ export * from './src/util/AllVoidCompositeHandler';
export * from './src/util/AsyncHandler';
export * from './src/util/FirstCompositeHandler';
export * from './src/util/HeaderUtil';
export * from './src/util/MetadataController';
export * from './src/util/Util';
export * from './src/util/PathUtil';
export * from './src/util/QuadUtil';
export * from './src/util/StreamUtil';
@@ -5,12 +5,12 @@ import type { PermissionSet } from '../ldp/permissions/PermissionSet';
import type { Representation } from '../ldp/representation/Representation';
import type { ResourceIdentifier } from '../ldp/representation/ResourceIdentifier';
import { getLoggerFor } from '../logging/LogUtil';
import type { ContainerManager } from '../storage/ContainerManager';
import type { ResourceStore } from '../storage/ResourceStore';
import { INTERNAL_QUADS } from '../util/ContentTypes';
import { ForbiddenHttpError } from '../util/errors/ForbiddenHttpError';
import { NotFoundHttpError } from '../util/errors/NotFoundHttpError';
import { UnauthorizedHttpError } from '../util/errors/UnauthorizedHttpError';
import { getParentContainer } from '../util/PathUtil';
import { ACL, FOAF } from '../util/UriConstants';
import type { AclManager } from './AclManager';
import type { AuthorizerArgs } from './Authorizer';
@@ -25,13 +25,11 @@ export class WebAclAuthorizer extends Authorizer {
protected readonly logger = getLoggerFor(this);

private readonly aclManager: AclManager;
private readonly containerManager: ContainerManager;
private readonly resourceStore: ResourceStore;

public constructor(aclManager: AclManager, containerManager: ContainerManager, resourceStore: ResourceStore) {
public constructor(aclManager: AclManager, resourceStore: ResourceStore) {
super();
this.aclManager = aclManager;
this.containerManager = containerManager;
this.resourceStore = resourceStore;
}

@@ -134,7 +132,7 @@ export class WebAclAuthorizer extends Authorizer {
}

this.logger.debug(`Traversing to the parent of ${id.path}`);
const parent = await this.containerManager.getContainer(id);
const parent = getParentContainer(id);
return this.getAclRecursive(parent, true);
}

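A minimal sketch (not part of this diff) of how the authorizer is wired up after this change; aclManager and store are assumed to be existing AclManager and ResourceStore instances created elsewhere in the configuration:

// Before: new WebAclAuthorizer(aclManager, new UrlContainerManager(base), store)
// After: the parent container is derived from the identifier itself via getParentContainer,
// so no ContainerManager needs to be injected.
const authorizer = new WebAclAuthorizer(aclManager, store);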
@@ -2,7 +2,7 @@ import { getLoggerFor } from '../../logging/LogUtil';
import type { HttpResponse } from '../../server/HttpResponse';
import { INTERNAL_QUADS } from '../../util/ContentTypes';
import { UnsupportedHttpError } from '../../util/errors/UnsupportedHttpError';
import { pipeSafe } from '../../util/Util';
import { pipeSafely } from '../../util/StreamUtil';
import type { MetadataWriter } from './metadata/MetadataWriter';
import type { ResponseDescription } from './response/ResponseDescription';
import { ResponseWriter } from './ResponseWriter';
@@ -34,7 +34,7 @@ export class BasicResponseWriter extends ResponseWriter {
input.response.writeHead(input.result.statusCode);

if (input.result.data) {
const pipe = pipeSafe(input.result.data, input.response);
const pipe = pipeSafely(input.result.data, input.response);
pipe.on('error', (error): void => {
this.logger.error(`Writing to HttpResponse failed with message ${error.message}`);
});

@@ -1,7 +1,7 @@
import type { TLSSocket } from 'tls';
import { getLoggerFor } from '../../logging/LogUtil';
import type { HttpRequest } from '../../server/HttpRequest';
import { toCanonicalUriPath } from '../../util/Util';
import { toCanonicalUriPath } from '../../util/PathUtil';
import type { ResourceIdentifier } from '../representation/ResourceIdentifier';
import { TargetExtractor } from './TargetExtractor';

@@ -5,7 +5,7 @@ import { getLoggerFor } from '../../logging/LogUtil';
import { APPLICATION_SPARQL_UPDATE } from '../../util/ContentTypes';
import { UnsupportedHttpError } from '../../util/errors/UnsupportedHttpError';
import { UnsupportedMediaTypeHttpError } from '../../util/errors/UnsupportedMediaTypeHttpError';
import { pipeSafe, readableToString } from '../../util/Util';
import { pipeSafely, readableToString } from '../../util/StreamUtil';
import type { BodyParserArgs } from './BodyParser';
import { BodyParser } from './BodyParser';
import type { SparqlUpdatePatch } from './SparqlUpdatePatch';
@@ -29,8 +29,8 @@ export class SparqlUpdateBodyParser extends BodyParser {
// Note that readableObjectMode is only defined starting from Node 12
// It is impossible to check if object mode is enabled in Node 10 (without accessing private variables)
const options = { objectMode: request.readableObjectMode };
const toAlgebraStream = pipeSafe(request, new PassThrough(options));
const dataCopy = pipeSafe(request, new PassThrough(options));
const toAlgebraStream = pipeSafely(request, new PassThrough(options));
const dataCopy = pipeSafely(request, new PassThrough(options));
let algebra: Algebra.Operation;
try {
const sparql = await readableToString(toAlgebraStream);
@@ -1,5 +1,5 @@
import type { HttpResponse } from '../../../server/HttpResponse';
import { addHeader } from '../../../util/Util';
import { addHeader } from '../../../util/HeaderUtil';
import type { RepresentationMetadata } from '../../representation/RepresentationMetadata';
import { MetadataWriter } from './MetadataWriter';

@@ -1,5 +1,5 @@
import type { HttpResponse } from '../../../server/HttpResponse';
import { addHeader } from '../../../util/Util';
import { addHeader } from '../../../util/HeaderUtil';
import type { RepresentationMetadata } from '../../representation/RepresentationMetadata';
import { MetadataWriter } from './MetadataWriter';

@@ -1,16 +0,0 @@
import type { ResourceIdentifier } from '../ldp/representation/ResourceIdentifier';

/**
 * Handles the identification of containers in which a resource is contained.
 */
export interface ContainerManager {
/**
* Finds the corresponding container.
* Should throw an error if there is no such container (in the case of root).
*
* @param id - Identifier to find container of.
*
* @returns The identifier of the container this resource is in.
*/
getContainer: (id: ResourceIdentifier) => Promise<ResourceIdentifier>;
}
@@ -12,11 +12,11 @@ import { MethodNotAllowedHttpError } from '../util/errors/MethodNotAllowedHttpEr
import { NotFoundHttpError } from '../util/errors/NotFoundHttpError';
import { NotImplementedError } from '../util/errors/NotImplementedError';
import { UnsupportedHttpError } from '../util/errors/UnsupportedHttpError';
import type { MetadataController } from '../util/MetadataController';
import { ensureTrailingSlash, getParentContainer, trimTrailingSlashes } from '../util/PathUtil';
import { parseQuads } from '../util/QuadUtil';
import { generateResourceQuads } from '../util/ResourceUtil';
import { CONTENT_TYPE, HTTP, LDP, RDF } from '../util/UriConstants';
import { ensureTrailingSlash, trimTrailingSlashes } from '../util/Util';
import type { DataAccessor } from './accessors/DataAccessor';
import type { ContainerManager } from './ContainerManager';
import type { ResourceStore } from './ResourceStore';

/**
@@ -45,15 +45,10 @@ import type { ResourceStore } from './ResourceStore';
export class DataAccessorBasedStore implements ResourceStore {
private readonly accessor: DataAccessor;
private readonly base: string;
private readonly metadataController: MetadataController;
private readonly containerManager: ContainerManager;

public constructor(accessor: DataAccessor, base: string, metadataController: MetadataController,
containerManager: ContainerManager) {
public constructor(accessor: DataAccessor, base: string) {
this.accessor = accessor;
this.base = ensureTrailingSlash(base);
this.metadataController = metadataController;
this.containerManager = containerManager;
}

public async getRepresentation(identifier: ResourceIdentifier): Promise<Representation> {
@@ -219,13 +214,13 @@ export class DataAccessorBasedStore implements ResourceStore {
}

if (createContainers) {
await this.createRecursiveContainers(await this.containerManager.getContainer(identifier));
await this.createRecursiveContainers(getParentContainer(identifier));
}

// Make sure the metadata has the correct identifier and correct type quads
const { metadata } = representation;
metadata.identifier = DataFactory.namedNode(identifier.path);
metadata.addQuads(this.metadataController.generateResourceQuads(metadata.identifier, isContainer));
metadata.addQuads(generateResourceQuads(metadata.identifier, isContainer));

await (isContainer ?
this.accessor.writeContainer(identifier, representation.metadata) :
@@ -241,7 +236,7 @@ export class DataAccessorBasedStore implements ResourceStore {
protected async handleContainerData(representation: Representation): Promise<void> {
let quads: Quad[];
try {
quads = await this.metadataController.parseQuads(representation.data);
quads = await parseQuads(representation.data);
} catch (error: unknown) {
if (error instanceof Error) {
throw new UnsupportedHttpError(`Can only create containers with RDF data. ${error.message}`);
@@ -349,7 +344,7 @@ export class DataAccessorBasedStore implements ResourceStore {
} catch (error: unknown) {
if (error instanceof NotFoundHttpError) {
// Make sure the parent exists first
await this.createRecursiveContainers(await this.containerManager.getContainer(container));
await this.createRecursiveContainers(getParentContainer(container));
await this.writeData(container, this.getEmptyContainerRepresentation(container), true);
} else {
throw error;
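A hedged sketch (not part of this diff) of the simplified store wiring after this change; accessor stands for any DataAccessor implementation and base for the server's base URL:

// Before: new DataAccessorBasedStore(accessor, base, new MetadataController(), new UrlContainerManager(base))
// After: metadata and container helpers are plain functions, so only the accessor and base remain.
const store = new DataAccessorBasedStore(accessor, base);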
@@ -11,7 +11,7 @@ import {
encodeUriPathComponents,
ensureTrailingSlash,
trimTrailingSlashes,
} from '../util/Util';
} from '../util/PathUtil';
import type { FileIdentifierMapper, ResourceLink } from './FileIdentifierMapper';

const { join: joinPath, normalize: normalizePath } = posix;
@@ -1,34 +0,0 @@
import type { ResourceIdentifier } from '../ldp/representation/ResourceIdentifier';
import { ensureTrailingSlash } from '../util/Util';
import type { ContainerManager } from './ContainerManager';

/**
 * Determines containers based on URL decomposition.
 */
export class UrlContainerManager implements ContainerManager {
private readonly base: string;

public constructor(base: string) {
this.base = base;
}

public async getContainer(id: ResourceIdentifier): Promise<ResourceIdentifier> {
const path = this.canonicalUrl(id.path);
if (this.base === path) {
throw new Error('Root does not have a container');
}

const parentPath = new URL('..', path).toString();

// This probably means there is an issue with the root
if (parentPath === path) {
throw new Error('URL root reached');
}

return { path: parentPath };
}

private canonicalUrl(path: string): string {
return ensureTrailingSlash(path.toString());
}
}
@@ -12,10 +12,10 @@ import { ConflictHttpError } from '../../util/errors/ConflictHttpError';
import { NotFoundHttpError } from '../../util/errors/NotFoundHttpError';
import { isSystemError } from '../../util/errors/SystemError';
import { UnsupportedMediaTypeHttpError } from '../../util/errors/UnsupportedMediaTypeHttpError';
import type { MetadataController } from '../../util/MetadataController';
import { parseQuads, pushQuad, serializeQuads } from '../../util/QuadUtil';
import { generateContainmentQuads, generateResourceQuads } from '../../util/ResourceUtil';
import { CONTENT_TYPE, DCTERMS, POSIX, RDF, XSD } from '../../util/UriConstants';
import { toNamedNode, toTypedLiteral } from '../../util/UriUtil';
import { pushQuad } from '../../util/Util';
import type { FileIdentifierMapper, ResourceLink } from '../FileIdentifierMapper';
import type { DataAccessor } from './DataAccessor';

@@ -26,11 +26,9 @@ const { join: joinPath } = posix;
 */
export class FileDataAccessor implements DataAccessor {
private readonly resourceMapper: FileIdentifierMapper;
private readonly metadataController: MetadataController;

public constructor(resourceMapper: FileIdentifierMapper, metadataController: MetadataController) {
public constructor(resourceMapper: FileIdentifierMapper) {
this.resourceMapper = resourceMapper;
this.metadataController = metadataController;
}

/**
@@ -218,7 +216,7 @@ export class FileDataAccessor implements DataAccessor {

// Write metadata to file if there are quads remaining
if (quads.length > 0) {
const serializedMetadata = this.metadataController.serializeQuads(quads);
const serializedMetadata = serializeQuads(quads);
await this.writeDataFile(metadataPath, serializedMetadata);
wroteMetadata = true;

@@ -247,7 +245,7 @@ export class FileDataAccessor implements DataAccessor {
Promise<RepresentationMetadata> {
const metadata = new RepresentationMetadata(link.identifier.path)
.addQuads(await this.getRawMetadata(link.identifier));
metadata.addQuads(this.metadataController.generateResourceQuads(metadata.identifier as NamedNode, isContainer));
metadata.addQuads(generateResourceQuads(metadata.identifier as NamedNode, isContainer));
metadata.addQuads(this.generatePosixQuads(metadata.identifier as NamedNode, stats));
return metadata;
}
@@ -266,7 +264,7 @@ export class FileDataAccessor implements DataAccessor {
await fsPromises.lstat(metadataPath);

const readMetadataStream = createReadStream(metadataPath);
return await this.metadataController.parseQuads(readMetadataStream);
return await parseQuads(readMetadataStream);
} catch (error: unknown) {
// Metadata file doesn't exist so lets keep `rawMetaData` an empty array.
if (!isSystemError(error) || error.code !== 'ENOENT') {
@@ -306,13 +304,13 @@ export class FileDataAccessor implements DataAccessor {

// Generate metadata of this specific child
const subject = DataFactory.namedNode(childLink.identifier.path);
quads.push(...this.metadataController.generateResourceQuads(subject, childStats.isDirectory()));
quads.push(...generateResourceQuads(subject, childStats.isDirectory()));
quads.push(...this.generatePosixQuads(subject, childStats));
childURIs.push(childLink.identifier.path);
}

// Generate containment metadata
const containsQuads = this.metadataController.generateContainerContainsResourceQuads(
const containsQuads = generateContainmentQuads(
DataFactory.namedNode(link.identifier.path), childURIs,
);

@@ -5,8 +5,8 @@ import type { NamedNode } from 'rdf-js';
import { RepresentationMetadata } from '../../ldp/representation/RepresentationMetadata';
import type { ResourceIdentifier } from '../../ldp/representation/ResourceIdentifier';
import { NotFoundHttpError } from '../../util/errors/NotFoundHttpError';
import type { MetadataController } from '../../util/MetadataController';
import { ensureTrailingSlash } from '../../util/Util';
import { ensureTrailingSlash } from '../../util/PathUtil';
import { generateContainmentQuads, generateResourceQuads } from '../../util/ResourceUtil';
import type { DataAccessor } from './DataAccessor';

interface DataEntry {
@@ -43,14 +43,12 @@ class ArrayReadable extends Readable {
export class InMemoryDataAccessor implements DataAccessor {
private readonly base: string;
private readonly store: ContainerEntry;
private readonly metadataController: MetadataController;

public constructor(base: string, metadataController: MetadataController) {
public constructor(base: string) {
this.base = ensureTrailingSlash(base);
this.metadataController = metadataController;

const metadata = new RepresentationMetadata(this.base);
metadata.addQuads(this.metadataController.generateResourceQuads(DataFactory.namedNode(this.base), true));
metadata.addQuads(generateResourceQuads(DataFactory.namedNode(this.base), true));
this.store = { entries: {}, metadata };
}

@@ -161,8 +159,7 @@ export class InMemoryDataAccessor implements DataAccessor {
if (!this.isDataEntry(entry)) {
const childNames = Object.keys(entry.entries).map((name): string =>
`${identifier.path}${name}${this.isDataEntry(entry.entries[name]) ? '' : '/'}`);
const quads = this.metadataController
.generateContainerContainsResourceQuads(metadata.identifier as NamedNode, childNames);
const quads = generateContainmentQuads(metadata.identifier as NamedNode, childNames);
metadata.addQuads(quads);
}
return metadata;
@@ -23,11 +23,10 @@ import { ConflictHttpError } from '../../util/errors/ConflictHttpError';
import { NotFoundHttpError } from '../../util/errors/NotFoundHttpError';
import { UnsupportedHttpError } from '../../util/errors/UnsupportedHttpError';
import { UnsupportedMediaTypeHttpError } from '../../util/errors/UnsupportedMediaTypeHttpError';
import type { MetadataController } from '../../util/MetadataController';
import { ensureTrailingSlash, getParentContainer } from '../../util/PathUtil';
import { generateResourceQuads } from '../../util/ResourceUtil';
import { CONTENT_TYPE, LDP } from '../../util/UriConstants';
import { toNamedNode } from '../../util/UriUtil';
import { ensureTrailingSlash } from '../../util/Util';
import type { ContainerManager } from '../ContainerManager';
import type { DataAccessor } from './DataAccessor';

const { defaultGraph, namedNode, quad, variable } = DataFactory;
@@ -48,17 +47,12 @@ export class SparqlDataAccessor implements DataAccessor {
protected readonly logger = getLoggerFor(this);
private readonly endpoint: string;
private readonly base: string;
private readonly containerManager: ContainerManager;
private readonly metadataController: MetadataController;
private readonly fetcher: SparqlEndpointFetcher;
private readonly generator: SparqlGenerator;

public constructor(endpoint: string, base: string, containerManager: ContainerManager,
metadataController: MetadataController) {
public constructor(endpoint: string, base: string) {
this.endpoint = endpoint;
this.base = ensureTrailingSlash(base);
this.containerManager = containerManager;
this.metadataController = metadataController;
this.fetcher = new SparqlEndpointFetcher();
this.generator = new Generator();
}
@@ -103,7 +97,7 @@ export class SparqlDataAccessor implements DataAccessor {

// Need to generate type metadata for the root container since it's not stored
if (identifier.path === this.base) {
metadata.addQuads(this.metadataController.generateResourceQuads(name, true));
metadata.addQuads(generateResourceQuads(name, true));
}

return metadata;
@@ -113,7 +107,7 @@ export class SparqlDataAccessor implements DataAccessor {
* Writes the given metadata for the container.
*/
public async writeContainer(identifier: ResourceIdentifier, metadata: RepresentationMetadata): Promise<void> {
const { name, parent } = await this.getRelatedNames(identifier);
const { name, parent } = this.getRelatedNames(identifier);
return this.sendSparqlUpdate(this.sparqlInsert(name, parent, metadata));
}

@@ -125,7 +119,7 @@ export class SparqlDataAccessor implements DataAccessor {
if (this.isMetadataIdentifier(identifier)) {
throw new ConflictHttpError('Not allowed to create NamedNodes with the metadata extension.');
}
const { name, parent } = await this.getRelatedNames(identifier);
const { name, parent } = this.getRelatedNames(identifier);

const triples = await arrayifyStream(data) as Quad[];
const def = defaultGraph();
@@ -143,15 +137,15 @@ export class SparqlDataAccessor implements DataAccessor {
* Removes all graph data relevant to the given identifier.
*/
public async deleteResource(identifier: ResourceIdentifier): Promise<void> {
const { name, parent } = await this.getRelatedNames(identifier);
const { name, parent } = this.getRelatedNames(identifier);
return this.sendSparqlUpdate(this.sparqlDelete(name, parent));
}

/**
* Helper function to get named nodes corresponding to the identifier and its parent container.
*/
private async getRelatedNames(identifier: ResourceIdentifier): Promise<{ name: NamedNode; parent: NamedNode }> {
const parentIdentifier = await this.containerManager.getContainer(identifier);
private getRelatedNames(identifier: ResourceIdentifier): { name: NamedNode; parent: NamedNode } {
const parentIdentifier = getParentContainer(identifier);
const name = namedNode(identifier.path);
const parent = namedNode(parentIdentifier.path);
return { name, parent };
@@ -1,7 +1,6 @@
import type { Representation } from '../../ldp/representation/Representation';
import { getLoggerFor } from '../../logging/LogUtil';
import { matchingMediaType } from '../../util/Util';
import { checkRequest } from './ConversionUtil';
import { validateRequestArgs, matchingMediaType } from './ConversionUtil';
import type { RepresentationConverterArgs } from './RepresentationConverter';
import { TypedRepresentationConverter } from './TypedRepresentationConverter';

@@ -48,7 +47,7 @@ export class ChainedConverter extends TypedRepresentationConverter {
// So we only check if the input can be parsed and the preferred type can be written
const inTypes = this.filterTypes(await this.first.getInputTypes());
const outTypes = this.filterTypes(await this.last.getOutputTypes());
checkRequest(input, inTypes, outTypes);
validateRequestArgs(input, inTypes, outTypes);
}

private filterTypes(typeVals: Record<string, number>): string[] {
@@ -3,7 +3,6 @@ import type { RepresentationPreferences } from '../../ldp/representation/Represe
import { INTERNAL_ALL } from '../../util/ContentTypes';
import { InternalServerError } from '../../util/errors/InternalServerError';
import { UnsupportedHttpError } from '../../util/errors/UnsupportedHttpError';
import { matchingMediaType } from '../../util/Util';
import type { RepresentationConverterArgs } from './RepresentationConverter';

/**
@@ -57,6 +56,33 @@ RepresentationPreference[] => {
return weightedSupported.filter((preference): boolean => preference.weight !== 0);
};

/**
* Checks if the given two media types/ranges match each other.
* Takes wildcards into account.
* @param mediaA - Media type to match.
* @param mediaB - Media type to match.
*
* @returns True if the media type patterns can match each other.
*/
export const matchingMediaType = (mediaA: string, mediaB: string): boolean => {
if (mediaA === mediaB) {
return true;
}

const [ typeA, subTypeA ] = mediaA.split('/');
const [ typeB, subTypeB ] = mediaB.split('/');
if (typeA === '*' || typeB === '*') {
return true;
}
if (typeA !== typeB) {
return false;
}
if (subTypeA === '*' || subTypeB === '*') {
return true;
}
return subTypeA === subTypeB;
};

/**
* Runs some standard checks on the input request:
* - Checks if there is a content type for the input.
@@ -66,8 +92,8 @@ RepresentationPreference[] => {
* @param supportedIn - Media types that can be parsed by the converter.
* @param supportedOut - Media types that can be produced by the converter.
*/
export const checkRequest = (request: RepresentationConverterArgs, supportedIn: string[], supportedOut: string[]):
void => {
export const validateRequestArgs = (request: RepresentationConverterArgs, supportedIn: string[],
supportedOut: string[]): void => {
const inType = request.representation.metadata.contentType;
if (!inType) {
throw new UnsupportedHttpError('Input type required for conversion.');
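A small illustrative sketch (not part of this diff) of the matchingMediaType helper that now lives in ConversionUtil; the return values follow directly from the wildcard logic shown above:

import { matchingMediaType } from './ConversionUtil';

matchingMediaType('text/turtle', 'text/*');        // true: same main type, subtype wildcard
matchingMediaType('text/turtle', 'application/*'); // false: main types differ
matchingMediaType('*/*', 'application/ld+json');   // true: full wildcard matches anything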
@@ -5,7 +5,7 @@ import { RepresentationMetadata } from '../../ldp/representation/RepresentationM
import type { RepresentationPreferences } from '../../ldp/representation/RepresentationPreferences';
import { INTERNAL_QUADS } from '../../util/ContentTypes';
import { CONTENT_TYPE } from '../../util/UriConstants';
import { checkRequest, matchingTypes } from './ConversionUtil';
import { validateRequestArgs, matchingTypes } from './ConversionUtil';
import type { RepresentationConverterArgs } from './RepresentationConverter';
import { TypedRepresentationConverter } from './TypedRepresentationConverter';

@@ -22,7 +22,7 @@ export class QuadToRdfConverter extends TypedRepresentationConverter {
}

public async canHandle(input: RepresentationConverterArgs): Promise<void> {
checkRequest(input, [ INTERNAL_QUADS ], await rdfSerializer.getContentTypes());
validateRequestArgs(input, [ INTERNAL_QUADS ], await rdfSerializer.getContentTypes());
}

public async handle(input: RepresentationConverterArgs): Promise<Representation> {
@@ -4,9 +4,9 @@ import type { Representation } from '../../ldp/representation/Representation';
import { RepresentationMetadata } from '../../ldp/representation/RepresentationMetadata';
import { INTERNAL_QUADS } from '../../util/ContentTypes';
import { UnsupportedHttpError } from '../../util/errors/UnsupportedHttpError';
import { pipeSafely } from '../../util/StreamUtil';
import { CONTENT_TYPE } from '../../util/UriConstants';
import { pipeSafe } from '../../util/Util';
import { checkRequest } from './ConversionUtil';
import { validateRequestArgs } from './ConversionUtil';
import type { RepresentationConverterArgs } from './RepresentationConverter';
import { TypedRepresentationConverter } from './TypedRepresentationConverter';

@@ -23,7 +23,7 @@ export class RdfToQuadConverter extends TypedRepresentationConverter {
}

public async canHandle(input: RepresentationConverterArgs): Promise<void> {
checkRequest(input, await rdfParser.getContentTypes(), [ INTERNAL_QUADS ]);
validateRequestArgs(input, await rdfParser.getContentTypes(), [ INTERNAL_QUADS ]);
}

public async handle(input: RepresentationConverterArgs): Promise<Representation> {
@@ -40,7 +40,7 @@ export class RdfToQuadConverter extends TypedRepresentationConverter {
// Wrap the stream such that errors are transformed
// (Node 10 requires both writableObjectMode and readableObjectMode)
const pass = new PassThrough({ writableObjectMode: true, readableObjectMode: true });
const data = pipeSafe(rawQuads, pass, (error): Error => new UnsupportedHttpError(error.message));
const data = pipeSafely(rawQuads, pass, (error): Error => new UnsupportedHttpError(error.message));

return {
binary: false,
@@ -1,7 +1,7 @@
import type { Representation } from '../../ldp/representation/Representation';
import type { ResourceIdentifier } from '../../ldp/representation/ResourceIdentifier';
import { UnsupportedHttpError } from '../../util/errors/UnsupportedHttpError';
import { trimTrailingSlashes } from '../../util/Util';
import { trimTrailingSlashes } from '../../util/PathUtil';
import type { ResourceStore } from '../ResourceStore';
import { RouterRule } from './RouterRule';

@@ -1,4 +1,5 @@
import { getLoggerFor } from '../logging/LogUtil';
import type { HttpResponse } from '../server/HttpResponse';
import { UnsupportedHttpError } from './errors/UnsupportedHttpError';

const logger = getLoggerFor('HeaderUtil');
@@ -355,3 +356,25 @@ export const parseAcceptLanguage = (input: string): AcceptLanguage[] => {
});
return results;
};

/**
* Adds a header value without overriding previous values.
*/
export const addHeader = (response: HttpResponse, name: string, value: string | string[]): void => {
let allValues: string[] = [];
if (response.hasHeader(name)) {
let oldValues = response.getHeader(name)!;
if (typeof oldValues === 'string') {
oldValues = [ oldValues ];
} else if (typeof oldValues === 'number') {
oldValues = [ `${oldValues}` ];
}
allValues = oldValues;
}
if (Array.isArray(value)) {
allValues.push(...value);
} else {
allValues.push(value);
}
response.setHeader(name, allValues.length === 1 ? allValues[0] : allValues);
};
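A brief usage sketch (not part of this diff) of the addHeader export now living in HeaderUtil; the header values are made-up examples and response is assumed to be an HttpResponse from an active request handler:

import { addHeader } from '../util/HeaderUtil';

// The first call sets the header; later calls append instead of overwriting.
addHeader(response, 'Link', '<meta.ttl>; rel="describedby"');
addHeader(response, 'Link', '<http://www.w3.org/ns/ldp#Resource>; rel="type"');
// The response now carries both Link values.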
@@ -1,61 +0,0 @@
import type { Readable } from 'stream';
import arrayifyStream from 'arrayify-stream';
import { DataFactory, StreamParser, StreamWriter } from 'n3';
import type { NamedNode, Quad } from 'rdf-js';
import streamifyArray from 'streamify-array';
import { RepresentationMetadata } from '../ldp/representation/RepresentationMetadata';
import { TEXT_TURTLE } from './ContentTypes';
import { LDP, RDF } from './UriConstants';
import { toNamedNode } from './UriUtil';
import { pipeSafe, pushQuad } from './Util';

export class MetadataController {
/**
* Helper function to generate type quads for a Container or Resource.
* @param subject - Subject for the new quads.
* @param isContainer - If the identifier corresponds to a container.
*
* @returns The generated quads.
*/
public generateResourceQuads(subject: NamedNode, isContainer: boolean): Quad[] {
const quads: Quad[] = [];
if (isContainer) {
pushQuad(quads, subject, toNamedNode(RDF.type), toNamedNode(LDP.Container));
pushQuad(quads, subject, toNamedNode(RDF.type), toNamedNode(LDP.BasicContainer));
}
pushQuad(quads, subject, toNamedNode(RDF.type), toNamedNode(LDP.Resource));

return quads;
}

/**
* Helper function to generate the quads describing that the resource URIs are children of the container URI.
* @param containerURI - The URI of the container.
* @param childURIs - The URI of the child resources.
*
* @returns The generated quads.
*/
public generateContainerContainsResourceQuads(containerURI: NamedNode, childURIs: string[]): Quad[] {
return new RepresentationMetadata(containerURI, { [LDP.contains]: childURIs.map(DataFactory.namedNode) }).quads();
}

/**
* Helper function for serializing an array of quads, with as result a Readable object.
* @param quads - The array of quads.
*
* @returns The Readable object.
*/
public serializeQuads(quads: Quad[]): Readable {
return pipeSafe(streamifyArray(quads), new StreamWriter({ format: TEXT_TURTLE }));
}

/**
* Helper function to convert a Readable into an array of quads.
* @param readable - The readable object.
*
* @returns A promise containing the array of quads.
*/
public async parseQuads(readable: Readable): Promise<Quad[]> {
return await arrayifyStream(pipeSafe(readable, new StreamParser({ format: TEXT_TURTLE })));
}
}
src/util/PathUtil.ts (new file, 60 lines)
@@ -0,0 +1,60 @@
import type { ResourceIdentifier } from '../ldp/representation/ResourceIdentifier';
import { InternalServerError } from './errors/InternalServerError';

/**
* Makes sure the input path has exactly 1 slash at the end.
* Multiple slashes will get merged into one.
* If there is no slash it will be added.
*
* @param path - Path to check.
*
* @returns The potentially changed path.
*/
export const ensureTrailingSlash = (path: string): string => path.replace(/\/*$/u, '/');

/**
* Makes sure the input path has no slashes at the end.
*
* @param path - Path to check.
*
* @returns The potentially changed path.
*/
export const trimTrailingSlashes = (path: string): string => path.replace(/\/+$/u, '');

/**
* Converts a URI path to the canonical version by splitting on slashes,
* decoding any percent-based encodings,
* and then encoding any special characters.
*/
export const toCanonicalUriPath = (path: string): string => path.split('/').map((part): string =>
encodeURIComponent(decodeURIComponent(part))).join('/');

/**
* Decodes all components of a URI path.
*/
export const decodeUriPathComponents = (path: string): string => path.split('/').map(decodeURIComponent).join('/');

/**
* Encodes all (non-slash) special characters in a URI path.
*/
export const encodeUriPathComponents = (path: string): string => path.split('/').map(encodeURIComponent).join('/');

/**
* Finds the container containing the given resource.
* This does not ensure either the container or resource actually exist.
*
* @param id - Identifier to find container of.
*
* @returns The identifier of the container this resource is in.
*/
export const getParentContainer = (id: ResourceIdentifier): ResourceIdentifier => {
// Trailing slash is necessary for URL library
const parentPath = new URL('..', ensureTrailingSlash(id.path)).toString();

// This probably means there is an issue with the root
if (parentPath === id.path) {
throw new InternalServerError('URL root reached');
}

return { path: parentPath };
};
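A short sketch (not part of this diff) of the new PathUtil helpers; the identifiers are made up for illustration, and the results follow from the definitions above:

import { ensureTrailingSlash, getParentContainer, toCanonicalUriPath } from './PathUtil';

ensureTrailingSlash('http://test.com/foo');               // 'http://test.com/foo/'
toCanonicalUriPath('/foo%20bar/baz');                     // '/foo%20bar/baz' (decoded, then re-encoded)
getParentContainer({ path: 'http://test.com/foo/bar' });  // { path: 'http://test.com/foo/' }
// getParentContainer throws an InternalServerError when called on the URL root.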
src/util/QuadUtil.ts (new file, 32 lines)
@@ -0,0 +1,32 @@
import type { Readable } from 'stream';
import arrayifyStream from 'arrayify-stream';
import { DataFactory, StreamParser, StreamWriter } from 'n3';
import type { Literal, NamedNode, Quad } from 'rdf-js';
import streamifyArray from 'streamify-array';
import { TEXT_TURTLE } from './ContentTypes';
import { pipeSafely } from './StreamUtil';

/**
* Generates a quad with the given subject/predicate/object and pushes it to the given array.
*/
export const pushQuad =
(quads: Quad[], subject: NamedNode, predicate: NamedNode, object: NamedNode | Literal): number =>
quads.push(DataFactory.quad(subject, predicate, object));

/**
* Helper function for serializing an array of quads, with as result a Readable object.
* @param quads - The array of quads.
*
* @returns The Readable object.
*/
export const serializeQuads = (quads: Quad[]): Readable =>
pipeSafely(streamifyArray(quads), new StreamWriter({ format: TEXT_TURTLE }));

/**
* Helper function to convert a Readable into an array of quads.
* @param readable - The readable object.
*
* @returns A promise containing the array of quads.
*/
export const parseQuads = async(readable: Readable): Promise<Quad[]> =>
arrayifyStream(pipeSafely(readable, new StreamParser({ format: TEXT_TURTLE })));
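A round-trip sketch (not part of this diff) of the new QuadUtil helpers; the terms are arbitrary examples and the snippet is assumed to run inside an async function:

import { DataFactory } from 'n3';
import type { Quad } from 'rdf-js';
import { parseQuads, pushQuad, serializeQuads } from './QuadUtil';

const quads: Quad[] = [];
pushQuad(quads,
  DataFactory.namedNode('http://test.com/foo'),
  DataFactory.namedNode('http://www.w3.org/1999/02/22-rdf-syntax-ns#type'),
  DataFactory.namedNode('http://www.w3.org/ns/ldp#Resource'));

// Serialize to a Turtle stream and parse it back into an array of quads.
const turtleStream = serializeQuads(quads);
const roundTripped = await parseQuads(turtleStream);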
src/util/ResourceUtil.ts (new file, 34 lines)
@@ -0,0 +1,34 @@
import { DataFactory } from 'n3';
import type { NamedNode, Quad } from 'rdf-js';
import { RepresentationMetadata } from '../ldp/representation/RepresentationMetadata';
import { pushQuad } from './QuadUtil';
import { LDP, RDF } from './UriConstants';
import { toNamedNode } from './UriUtil';

/**
* Helper function to generate type quads for a Container or Resource.
* @param subject - Subject for the new quads.
* @param isContainer - If the identifier corresponds to a container.
*
* @returns The generated quads.
*/
export const generateResourceQuads = (subject: NamedNode, isContainer: boolean): Quad[] => {
const quads: Quad[] = [];
if (isContainer) {
pushQuad(quads, subject, toNamedNode(RDF.type), toNamedNode(LDP.Container));
pushQuad(quads, subject, toNamedNode(RDF.type), toNamedNode(LDP.BasicContainer));
}
pushQuad(quads, subject, toNamedNode(RDF.type), toNamedNode(LDP.Resource));

return quads;
};

/**
* Helper function to generate the quads describing that the resource URIs are children of the container URI.
* @param containerURI - The URI of the container.
* @param childURIs - The URI of the child resources.
*
* @returns The generated quads.
*/
export const generateContainmentQuads = (containerURI: NamedNode, childURIs: string[]): Quad[] =>
new RepresentationMetadata(containerURI, { [LDP.contains]: childURIs.map(DataFactory.namedNode) }).quads();
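A sketch (not part of this diff) of the new ResourceUtil helpers; the container and child URIs are illustrative only:

import { DataFactory } from 'n3';
import { generateContainmentQuads, generateResourceQuads } from './ResourceUtil';

const container = DataFactory.namedNode('http://test.com/foo/');
// rdf:type quads marking the subject as ldp:Resource (plus ldp:Container/ldp:BasicContainer when isContainer is true)
const typeQuads = generateResourceQuads(container, true);
// ldp:contains quads linking the container to its children
const containment = generateContainmentQuads(container,
  [ 'http://test.com/foo/a', 'http://test.com/foo/b' ]);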
src/util/StreamUtil.ts (new file, 38 lines)
@@ -0,0 +1,38 @@
import type { Readable, Writable } from 'stream';
import arrayifyStream from 'arrayify-stream';
import { getLoggerFor } from '../logging/LogUtil';

const logger = getLoggerFor('StreamUtil');

/**
* Joins all strings of a stream.
* @param stream - Stream of strings.
*
* @returns The joined string.
*/
export const readableToString = async(stream: Readable): Promise<string> => (await arrayifyStream(stream)).join('');

/**
* Pipes one stream into another and emits errors of the first stream with the second.
* In case of an error in the first stream the second one will be destroyed with the given error.
* @param readable - Initial readable stream.
* @param destination - The destination for writing data.
* @param mapError - Optional function that takes the error and converts it to a new error.
*
* @returns The destination stream.
*/
export const pipeSafely = <T extends Writable>(readable: NodeJS.ReadableStream, destination: T,
mapError?: (error: Error) => Error): T => {
// Not using `stream.pipeline` since the result there only emits an error event if the last stream has the error
readable.pipe(destination);
readable.on('error', (error): void => {
logger.warn(`Piped stream errored with ${error.message}`);

// From https://nodejs.org/api/stream.html#stream_readable_pipe_destination_options :
// "One important caveat is that if the Readable stream emits an error during processing, the Writable destination
// is not closed automatically. If an error occurs, it will be necessary to manually close each stream
// in order to prevent memory leaks."
destination.destroy(mapError ? mapError(error) : error);
});
return destination;
};
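A sketch (not part of this diff) of pipeSafely with the optional error mapper; the file paths are made up and any readable/writable pair would work the same way:

import { createReadStream, createWriteStream } from 'fs';
import { pipeSafely } from './StreamUtil';

const source = createReadStream('input.ttl');
const target = createWriteStream('output.ttl');

// If `source` errors, `target` is destroyed with the mapped error instead of being left open.
const piped = pipeSafely(source, target, (error): Error => new Error(`Copy failed: ${error.message}`));
piped.on('error', (error): void => console.error(error.message));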
src/util/Util.ts (deleted, 131 lines)
@@ -1,131 +0,0 @@
import type { Readable, Writable } from 'stream';
import arrayifyStream from 'arrayify-stream';
import { DataFactory } from 'n3';
import type { Literal, NamedNode, Quad } from 'rdf-js';
import { getLoggerFor } from '../logging/LogUtil';
import type { HttpResponse } from '../server/HttpResponse';

const logger = getLoggerFor('Util');

/**
* Makes sure the input path has exactly 1 slash at the end.
* Multiple slashes will get merged into one.
* If there is no slash it will be added.
*
* @param path - Path to check.
*
* @returns The potentially changed path.
*/
export const ensureTrailingSlash = (path: string): string => path.replace(/\/*$/u, '/');

/**
* Joins all strings of a stream.
* @param stream - Stream of strings.
*
* @returns The joined string.
*/
export const readableToString = async(stream: Readable): Promise<string> => (await arrayifyStream(stream)).join('');

/**
* Makes sure the input path has no slashes at the end.
*
* @param path - Path to check.
*
* @returns The potentially changed path.
*/
export const trimTrailingSlashes = (path: string): string => path.replace(/\/+$/u, '');

/**
* Checks if the given two media types/ranges match each other.
* Takes wildcards into account.
* @param mediaA - Media type to match.
* @param mediaB - Media type to match.
*
* @returns True if the media type patterns can match each other.
*/
export const matchingMediaType = (mediaA: string, mediaB: string): boolean => {
const [ typeA, subTypeA ] = mediaA.split('/');
const [ typeB, subTypeB ] = mediaB.split('/');
if (typeA === '*' || typeB === '*') {
return true;
}
if (typeA !== typeB) {
return false;
}
if (subTypeA === '*' || subTypeB === '*') {
return true;
}
return subTypeA === subTypeB;
};

/**
* Pipes one stream into another and emits errors of the first stream with the second.
* In case of an error in the first stream the second one will be destroyed with the given error.
* @param readable - Initial readable stream.
* @param destination - The destination for writing data.
* @param mapError - Optional function that takes the error and converts it to a new error.
*
* @returns The destination stream.
*/
export const pipeSafe = <T extends Writable>(readable: NodeJS.ReadableStream, destination: T,
mapError?: (error: Error) => Error): T => {
// Not using `stream.pipeline` since the result there only emits an error event if the last stream has the error
readable.pipe(destination);
readable.on('error', (error): void => {
logger.warn(`Piped stream errored with ${error.message}`);

// From https://nodejs.org/api/stream.html#stream_readable_pipe_destination_options :
// "One important caveat is that if the Readable stream emits an error during processing, the Writable destination
// is not closed automatically. If an error occurs, it will be necessary to manually close each stream
// in order to prevent memory leaks."
destination.destroy(mapError ? mapError(error) : error);
});
return destination;
};

/**
* Converts a URI path to the canonical version by splitting on slashes,
* decoding any percent-based encodings,
* and then encoding any special characters.
*/
export const toCanonicalUriPath = (path: string): string => path.split('/').map((part): string =>
encodeURIComponent(decodeURIComponent(part))).join('/');

/**
* Decodes all components of a URI path.
*/
export const decodeUriPathComponents = (path: string): string => path.split('/').map(decodeURIComponent).join('/');

/**
* Encodes all (non-slash) special characters in a URI path.
*/
export const encodeUriPathComponents = (path: string): string => path.split('/').map(encodeURIComponent).join('/');

/**
* Generates a quad with the given subject/predicate/object and pushes it to the given array.
*/
export const pushQuad =
(quads: Quad[], subject: NamedNode, predicate: NamedNode, object: NamedNode | Literal): number =>
quads.push(DataFactory.quad(subject, predicate, object));

/**
* Adds a header value without overriding previous values.
*/
export const addHeader = (response: HttpResponse, name: string, value: string | string[]): void => {
let allValues: string[] = [];
if (response.hasHeader(name)) {
let oldValues = response.getHeader(name)!;
if (typeof oldValues === 'string') {
oldValues = [ oldValues ];
} else if (typeof oldValues === 'number') {
oldValues = [ `${oldValues}` ];
}
allValues = oldValues;
}
if (Array.isArray(value)) {
allValues.push(...value);
} else {
allValues.push(value);
}
response.setHeader(name, allValues.length === 1 ? allValues[0] : allValues);
};
@@ -51,7 +51,7 @@ export class AuthenticatedDataAccessorBasedConfig implements ServerConfig {
const operationHandler = getOperationHandler(this.store);

const responseWriter = getResponseWriter();
const authorizer = getWebAclAuthorizer(this.store, this.base);
const authorizer = getWebAclAuthorizer(this.store);

const handler = new AuthenticatedLdpHandler({
requestParser,
@@ -27,7 +27,6 @@ import {
LinkRelMetadataWriter,
LinkTypeParser,
MappedMetadataWriter,
MetadataController,
PatchingStore,
PatchOperationHandler,
PostOperationHandler,
@@ -38,7 +37,6 @@ import {
SlugParser,
SparqlUpdatePatchHandler,
UrlBasedAclManager,
UrlContainerManager,
WebAclAuthorizer,
} from '../../index';
import { CONTENT_TYPE, HTTP, RDF } from '../../src/util/UriConstants';
@@ -52,19 +50,14 @@ export const BASE = 'http://test.com';
export const getRootFilePath = (subfolder: string): string => join(__dirname, '../testData', subfolder);

/**
* Gives a file data accessor store based on (default) runtime config.
* Gives a data accessor store with the given data accessor.
* @param base - Base URL.
* @param rootFilepath - The root file path.
* @param dataAccessor - DataAccessor to use.
*
* @returns The data accessor based store.
*/
export const getDataAccessorStore = (base: string, dataAccessor: DataAccessor): DataAccessorBasedStore =>
new DataAccessorBasedStore(
dataAccessor,
base,
new MetadataController(),
new UrlContainerManager(base),
);
new DataAccessorBasedStore(dataAccessor, base);

/**
* Gives an in memory resource store based on (default) base url.
@@ -73,7 +66,7 @@ export const getDataAccessorStore = (base: string, dataAccessor: DataAccessor):
* @returns The in memory resource store.
*/
export const getInMemoryResourceStore = (base = BASE): DataAccessorBasedStore =>
getDataAccessorStore(base, new InMemoryDataAccessor(BASE, new MetadataController()));
getDataAccessorStore(base, new InMemoryDataAccessor(BASE));

/**
* Gives a converting store given some converters.
@@ -172,15 +165,11 @@ export const getBasicRequestParser = (bodyParsers: BodyParser[] = []): BasicRequ
};

/**
* Gives a web acl authorizer, using a UrlContainerManager & based on a (default) runtimeConfig.
* Gives a web acl authorizer based on a (default) runtimeConfig.
* @param store - Initial resource store.
* @param base - Base URI of the pod.
* @param aclManager - Optional acl manager, default is UrlBasedAclManager.
*
* @returns The acl authorizer.
*/
export const getWebAclAuthorizer =
(store: ResourceStore, base = BASE, aclManager = new UrlBasedAclManager()): WebAclAuthorizer => {
const containerManager = new UrlContainerManager(base);
return new WebAclAuthorizer(aclManager, containerManager, store);
};
export const getWebAclAuthorizer = (store: ResourceStore, aclManager = new UrlBasedAclManager()): WebAclAuthorizer =>
new WebAclAuthorizer(aclManager, store);

@@ -5,9 +5,8 @@ import { RepresentationMetadata } from '../../src/ldp/representation/Representat
import { FileDataAccessor } from '../../src/storage/accessors/FileDataAccessor';
import { InMemoryDataAccessor } from '../../src/storage/accessors/InMemoryDataAccessor';
import { ExtensionBasedMapper } from '../../src/storage/ExtensionBasedMapper';
import { MetadataController } from '../../src/util/MetadataController';
import { ensureTrailingSlash } from '../../src/util/PathUtil';
import { CONTENT_TYPE, LDP } from '../../src/util/UriConstants';
import { ensureTrailingSlash } from '../../src/util/Util';
import { AuthenticatedDataAccessorBasedConfig } from '../configs/AuthenticatedDataAccessorBasedConfig';
import type { ServerConfig } from '../configs/ServerConfig';
import { BASE, getRootFilePath } from '../configs/Util';
@@ -16,12 +15,11 @@ import { AclTestHelper, FileTestHelper } from '../util/TestHelpers';
const dataAccessorStore: [string, (rootFilePath: string) => ServerConfig] = [
'AuthenticatedFileDataAccessorBasedStore',
(rootFilePath: string): ServerConfig => new AuthenticatedDataAccessorBasedConfig(BASE,
new FileDataAccessor(new ExtensionBasedMapper(BASE, rootFilePath), new MetadataController())),
new FileDataAccessor(new ExtensionBasedMapper(BASE, rootFilePath))),
];
const inMemoryDataAccessorStore: [string, (rootFilePath: string) => ServerConfig] = [
'AuthenticatedInMemoryDataAccessorBasedStore',
(): ServerConfig => new AuthenticatedDataAccessorBasedConfig(BASE,
new InMemoryDataAccessor(BASE, new MetadataController())),
(): ServerConfig => new AuthenticatedDataAccessorBasedConfig(BASE, new InMemoryDataAccessor(BASE)),
];

describe.each([ dataAccessorStore, inMemoryDataAccessorStore ])('A server using a %s', (name, configFn): void => {
@ -4,7 +4,6 @@ import type { HttpHandler } from '../../src/server/HttpHandler';
|
||||
import { FileDataAccessor } from '../../src/storage/accessors/FileDataAccessor';
|
||||
import { InMemoryDataAccessor } from '../../src/storage/accessors/InMemoryDataAccessor';
|
||||
import { ExtensionBasedMapper } from '../../src/storage/ExtensionBasedMapper';
|
||||
import { MetadataController } from '../../src/util/MetadataController';
|
||||
import { LDP } from '../../src/util/UriConstants';
|
||||
import { DataAccessorBasedConfig } from '../configs/DataAccessorBasedConfig';
|
||||
import type { ServerConfig } from '../configs/ServerConfig';
|
||||
@ -14,12 +13,11 @@ import { FileTestHelper } from '../util/TestHelpers';
|
||||
const fileDataAccessorStore: [string, (rootFilePath: string) => ServerConfig] = [
|
||||
'FileDataAccessorBasedStore',
|
||||
(rootFilePath: string): ServerConfig => new DataAccessorBasedConfig(BASE,
|
||||
new FileDataAccessor(new ExtensionBasedMapper(BASE, rootFilePath), new MetadataController())),
|
||||
new FileDataAccessor(new ExtensionBasedMapper(BASE, rootFilePath))),
|
||||
];
|
||||
const inMemoryDataAccessorStore: [string, (rootFilePath: string) => ServerConfig] = [
|
||||
'InMemoryDataAccessorBasedStore',
|
||||
(): ServerConfig => new DataAccessorBasedConfig(BASE,
|
||||
new InMemoryDataAccessor(BASE, new MetadataController())),
|
||||
(): ServerConfig => new DataAccessorBasedConfig(BASE, new InMemoryDataAccessor(BASE)),
|
||||
];
|
||||
|
||||
const configs = [ fileDataAccessorStore, inMemoryDataAccessorStore ];
|
||||
|
@ -4,8 +4,8 @@ import { RepresentationMetadata } from '../../src/ldp/representation/Representat
|
||||
import { ChainedConverter } from '../../src/storage/conversion/ChainedConverter';
|
||||
import { QuadToRdfConverter } from '../../src/storage/conversion/QuadToRdfConverter';
|
||||
import { RdfToQuadConverter } from '../../src/storage/conversion/RdfToQuadConverter';
|
||||
import { readableToString } from '../../src/util/StreamUtil';
|
||||
import { CONTENT_TYPE } from '../../src/util/UriConstants';
|
||||
import { readableToString } from '../../src/util/Util';
|
||||
|
||||
describe('A ChainedConverter', (): void => {
|
||||
const converters = [
|
||||
|
@ -1,7 +1,5 @@
|
||||
import { SparqlDataAccessor } from '../../src/storage/accessors/SparqlDataAccessor';
|
||||
import { UrlContainerManager } from '../../src/storage/UrlContainerManager';
|
||||
import { INTERNAL_QUADS } from '../../src/util/ContentTypes';
|
||||
import { MetadataController } from '../../src/util/MetadataController';
|
||||
import { DataAccessorBasedConfig } from '../configs/DataAccessorBasedConfig';
|
||||
import { BASE } from '../configs/Util';
|
||||
import { describeIf, FileTestHelper } from '../util/TestHelpers';
|
||||
@ -9,10 +7,7 @@ import { describeIf, FileTestHelper } from '../util/TestHelpers';
|
||||
describeIf('docker', 'a server with a SPARQL endpoint as storage', (): void => {
|
||||
describe('without acl', (): void => {
|
||||
const config = new DataAccessorBasedConfig(BASE,
|
||||
new SparqlDataAccessor('http://localhost:4000/sparql',
|
||||
BASE,
|
||||
new UrlContainerManager(BASE),
|
||||
new MetadataController()),
|
||||
new SparqlDataAccessor('http://localhost:4000/sparql', BASE),
|
||||
INTERNAL_QUADS);
|
||||
const handler = config.getHttpHandler();
|
||||
const fileHelper = new FileTestHelper(handler, new URL(BASE));
|
||||
|
@ -6,11 +6,11 @@ import { WebAclAuthorizer } from '../../../src/authorization/WebAclAuthorizer';
import type { PermissionSet } from '../../../src/ldp/permissions/PermissionSet';
import type { Representation } from '../../../src/ldp/representation/Representation';
import type { ResourceIdentifier } from '../../../src/ldp/representation/ResourceIdentifier';
import type { ContainerManager } from '../../../src/storage/ContainerManager';
import type { ResourceStore } from '../../../src/storage/ResourceStore';
import { ForbiddenHttpError } from '../../../src/util/errors/ForbiddenHttpError';
import { NotFoundHttpError } from '../../../src/util/errors/NotFoundHttpError';
import { UnauthorizedHttpError } from '../../../src/util/errors/UnauthorizedHttpError';
import { getParentContainer } from '../../../src/util/PathUtil';

const nn = namedNode;

@ -23,10 +23,6 @@ describe('A WebAclAuthorizer', (): void => {
      id.path.endsWith('.acl') ? id : { path: `${id.path}.acl` },
    isAcl: async(id: ResourceIdentifier): Promise<boolean> => id.path.endsWith('.acl'),
  };
  const containerManager: ContainerManager = {
    getContainer: async(id: ResourceIdentifier): Promise<ResourceIdentifier> =>
      ({ path: new URL('..', id.path).toString() }),
  };
  let permissions: PermissionSet;
  let credentials: Credentials;
  let identifier: ResourceIdentifier;
@ -42,7 +38,7 @@ describe('A WebAclAuthorizer', (): void => {
  });

  it('handles all inputs.', async(): Promise<void> => {
    authorizer = new WebAclAuthorizer(aclManager, containerManager, null as any);
    authorizer = new WebAclAuthorizer(aclManager, null as any);
    await expect(authorizer.canHandle({} as any)).resolves.toBeUndefined();
  });

@ -54,7 +50,7 @@ describe('A WebAclAuthorizer', (): void => {
        quad(nn('auth'), nn(`${acl}mode`), nn(`${acl}Read`)),
      ]) } as Representation),
    } as unknown as ResourceStore;
    authorizer = new WebAclAuthorizer(aclManager, containerManager, store);
    authorizer = new WebAclAuthorizer(aclManager, store);
    await expect(authorizer.handle({ identifier, permissions, credentials })).resolves.toBeUndefined();
  });

@ -67,13 +63,13 @@ describe('A WebAclAuthorizer', (): void => {
        return {
          data: streamifyArray([
            quad(nn('auth'), nn(`${acl}agentClass`), nn('http://xmlns.com/foaf/0.1/Agent')),
            quad(nn('auth'), nn(`${acl}default`), nn((await containerManager.getContainer(identifier)).path)),
            quad(nn('auth'), nn(`${acl}default`), nn(getParentContainer(identifier).path)),
            quad(nn('auth'), nn(`${acl}mode`), nn(`${acl}Read`)),
          ]),
        } as Representation;
      },
    } as unknown as ResourceStore;
    authorizer = new WebAclAuthorizer(aclManager, containerManager, store);
    authorizer = new WebAclAuthorizer(aclManager, store);
    await expect(authorizer.handle({ identifier, permissions, credentials })).resolves.toBeUndefined();
  });

@ -85,7 +81,7 @@ describe('A WebAclAuthorizer', (): void => {
        quad(nn('auth'), nn(`${acl}mode`), nn(`${acl}Read`)),
      ]) } as Representation),
    } as unknown as ResourceStore;
    authorizer = new WebAclAuthorizer(aclManager, containerManager, store);
    authorizer = new WebAclAuthorizer(aclManager, store);
    credentials.webID = 'http://test.com/user';
    await expect(authorizer.handle({ identifier, permissions, credentials })).resolves.toBeUndefined();
  });
@ -98,7 +94,7 @@ describe('A WebAclAuthorizer', (): void => {
        quad(nn('auth'), nn(`${acl}mode`), nn(`${acl}Read`)),
      ]) } as Representation),
    } as unknown as ResourceStore;
    authorizer = new WebAclAuthorizer(aclManager, containerManager, store);
    authorizer = new WebAclAuthorizer(aclManager, store);
    await expect(authorizer.handle({ identifier, permissions, credentials })).rejects.toThrow(UnauthorizedHttpError);
  });

@ -111,7 +107,7 @@ describe('A WebAclAuthorizer', (): void => {
        quad(nn('auth'), nn(`${acl}mode`), nn(`${acl}Read`)),
      ]) } as Representation),
    } as unknown as ResourceStore;
    authorizer = new WebAclAuthorizer(aclManager, containerManager, store);
    authorizer = new WebAclAuthorizer(aclManager, store);
    await expect(authorizer.handle({ identifier, permissions, credentials })).resolves.toBeUndefined();
  });

@ -124,7 +120,7 @@ describe('A WebAclAuthorizer', (): void => {
        quad(nn('auth'), nn(`${acl}mode`), nn(`${acl}Read`)),
      ]) } as Representation),
    } as unknown as ResourceStore;
    authorizer = new WebAclAuthorizer(aclManager, containerManager, store);
    authorizer = new WebAclAuthorizer(aclManager, store);
    await expect(authorizer.handle({ identifier, permissions, credentials })).rejects.toThrow(ForbiddenHttpError);
  });

@ -139,7 +135,7 @@ describe('A WebAclAuthorizer', (): void => {
      ]) } as Representation),
    } as unknown as ResourceStore;
    identifier = await aclManager.getAcl(identifier);
    authorizer = new WebAclAuthorizer(aclManager, containerManager, store);
    authorizer = new WebAclAuthorizer(aclManager, store);
    await expect(authorizer.handle({ identifier, permissions, credentials })).resolves.toBeUndefined();
  });

@ -154,7 +150,7 @@ describe('A WebAclAuthorizer', (): void => {
      ]) } as Representation),
    } as unknown as ResourceStore;
    identifier = await aclManager.getAcl(identifier);
    authorizer = new WebAclAuthorizer(aclManager, containerManager, store);
    authorizer = new WebAclAuthorizer(aclManager, store);
    await expect(authorizer.handle({ identifier, permissions, credentials })).rejects.toThrow(ForbiddenHttpError);
  });

@ -164,7 +160,7 @@ describe('A WebAclAuthorizer', (): void => {
        throw new Error('TEST!');
      },
    } as unknown as ResourceStore;
    authorizer = new WebAclAuthorizer(aclManager, containerManager, store);
    authorizer = new WebAclAuthorizer(aclManager, store);
    await expect(authorizer.handle({ identifier, permissions, credentials })).rejects.toThrow('TEST!');
  });
});
@ -8,7 +8,7 @@ import { RepresentationMetadata } from '../../../../src/ldp/representation/Repre
import type { HttpRequest } from '../../../../src/server/HttpRequest';
import { UnsupportedHttpError } from '../../../../src/util/errors/UnsupportedHttpError';
import { UnsupportedMediaTypeHttpError } from '../../../../src/util/errors/UnsupportedMediaTypeHttpError';
import { readableToString } from '../../../../src/util/Util';
import { readableToString } from '../../../../src/util/StreamUtil';

describe('A SparqlUpdateBodyParser', (): void => {
  const bodyParser = new SparqlUpdateBodyParser();

@ -1,8 +1,8 @@
import { LinkRelMetadataWriter } from '../../../../../src/ldp/http/metadata/LinkRelMetadataWriter';
import { RepresentationMetadata } from '../../../../../src/ldp/representation/RepresentationMetadata';
import * as util from '../../../../../src/util/HeaderUtil';
import { LDP, RDF } from '../../../../../src/util/UriConstants';
import { toNamedNode } from '../../../../../src/util/UriUtil';
import * as util from '../../../../../src/util/Util';

describe('A LinkRelMetadataWriter', (): void => {
  const writer = new LinkRelMetadataWriter({ [RDF.type]: 'type', dummy: 'dummy' });

@ -1,7 +1,7 @@
import { MappedMetadataWriter } from '../../../../../src/ldp/http/metadata/MappedMetadataWriter';
import { RepresentationMetadata } from '../../../../../src/ldp/representation/RepresentationMetadata';
import * as util from '../../../../../src/util/HeaderUtil';
import { CONTENT_TYPE } from '../../../../../src/util/UriConstants';
import * as util from '../../../../../src/util/Util';

describe('A MappedMetadataWriter', (): void => {
  const writer = new MappedMetadataWriter({ [CONTENT_TYPE]: 'content-type', dummy: 'dummy' });

@ -6,7 +6,6 @@ import type { Representation } from '../../../src/ldp/representation/Representat
import { RepresentationMetadata } from '../../../src/ldp/representation/RepresentationMetadata';
import type { ResourceIdentifier } from '../../../src/ldp/representation/ResourceIdentifier';
import type { DataAccessor } from '../../../src/storage/accessors/DataAccessor';
import type { ContainerManager } from '../../../src/storage/ContainerManager';
import { DataAccessorBasedStore } from '../../../src/storage/DataAccessorBasedStore';
import { INTERNAL_QUADS } from '../../../src/util/ContentTypes';
import { ConflictHttpError } from '../../../src/util/errors/ConflictHttpError';
@ -14,10 +13,9 @@ import { MethodNotAllowedHttpError } from '../../../src/util/errors/MethodNotAll
import { NotFoundHttpError } from '../../../src/util/errors/NotFoundHttpError';
import { NotImplementedError } from '../../../src/util/errors/NotImplementedError';
import { UnsupportedHttpError } from '../../../src/util/errors/UnsupportedHttpError';
import { MetadataController } from '../../../src/util/MetadataController';
import * as quadUtil from '../../../src/util/QuadUtil';
import { CONTENT_TYPE, HTTP, LDP, RDF } from '../../../src/util/UriConstants';
import { toNamedNode } from '../../../src/util/UriUtil';
import { ensureTrailingSlash } from '../../../src/util/Util';

class SimpleDataAccessor implements DataAccessor {
  public readonly data: Record<string, Representation> = {};
@ -68,8 +66,6 @@ class SimpleDataAccessor implements DataAccessor {
describe('A DataAccessorBasedStore', (): void => {
  let store: DataAccessorBasedStore;
  let accessor: SimpleDataAccessor;
  let containerManager: ContainerManager;
  let metadataController: MetadataController;
  const root = 'http://test.com/';
  let containerMetadata: RepresentationMetadata;
  let representation: Representation;
@ -78,20 +74,7 @@ describe('A DataAccessorBasedStore', (): void => {
  beforeEach(async(): Promise<void> => {
    accessor = new SimpleDataAccessor();

    metadataController = new MetadataController();

    containerManager = {
      async getContainer(id: ResourceIdentifier): Promise<ResourceIdentifier> {
        return { path: new URL('..', ensureTrailingSlash(id.path)).toString() };
      },
    };

    store = new DataAccessorBasedStore(
      accessor,
      root,
      metadataController,
      containerManager,
    );
    store = new DataAccessorBasedStore(accessor, root);

    containerMetadata = new RepresentationMetadata(
      { [RDF.type]: [ DataFactory.namedNode(LDP.Container), DataFactory.namedNode(LDP.BasicContainer) ]},
@ -171,11 +154,12 @@ describe('A DataAccessorBasedStore', (): void => {

  it('passes the result along if the MetadataController throws a non-Error.', async(): Promise<void> => {
    const resourceID = { path: root };
    metadataController.parseQuads = async(): Promise<any> => {
    const mock = jest.spyOn(quadUtil, 'parseQuads').mockImplementationOnce(async(): Promise<any> => {
      throw 'apple';
    };
    });
    representation.metadata.add(RDF.type, toNamedNode(LDP.Container));
    await expect(store.addResource(resourceID, representation)).rejects.toBe('apple');
    mock.mockRestore();
  });

  it('can write resources.', async(): Promise<void> => {
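The test above now stubs the free parseQuads function instead of a method on an injected MetadataController; the general jest pattern, taken directly from the diff, looks like this:

import * as quadUtil from '../../../src/util/QuadUtil';

// Replace the module-level function for a single call, then restore it so later
// tests hit the real implementation again.
const mock = jest.spyOn(quadUtil, 'parseQuads').mockImplementationOnce(async(): Promise<any> => {
  throw 'apple';
});
// ... exercise the code under test ...
mock.mockRestore();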
@ -2,7 +2,7 @@ import fs from 'fs';
import { ExtensionBasedMapper } from '../../../src/storage/ExtensionBasedMapper';
import { NotFoundHttpError } from '../../../src/util/errors/NotFoundHttpError';
import { UnsupportedHttpError } from '../../../src/util/errors/UnsupportedHttpError';
import { trimTrailingSlashes } from '../../../src/util/Util';
import { trimTrailingSlashes } from '../../../src/util/PathUtil';

jest.mock('fs');

@ -1,31 +0,0 @@
import { UrlContainerManager } from '../../../src/storage/UrlContainerManager';

describe('An UrlContainerManager', (): void => {
  it('returns the parent URl for a single call.', async(): Promise<void> => {
    const manager = new UrlContainerManager('http://test.com/foo/');
    await expect(manager.getContainer({ path: 'http://test.com/foo/bar' }))
      .resolves.toEqual({ path: 'http://test.com/foo/' });
    await expect(manager.getContainer({ path: 'http://test.com/foo/bar/' }))
      .resolves.toEqual({ path: 'http://test.com/foo/' });
  });

  it('errors when getting the container of root.', async(): Promise<void> => {
    let manager = new UrlContainerManager('http://test.com/foo/');
    await expect(manager.getContainer({ path: 'http://test.com/foo/' }))
      .rejects.toThrow('Root does not have a container');
    await expect(manager.getContainer({ path: 'http://test.com/foo' }))
      .rejects.toThrow('Root does not have a container');

    manager = new UrlContainerManager('http://test.com/foo/');
    await expect(manager.getContainer({ path: 'http://test.com/foo/' }))
      .rejects.toThrow('Root does not have a container');
    await expect(manager.getContainer({ path: 'http://test.com/foo' }))
      .rejects.toThrow('Root does not have a container');
  });

  it('errors when the root of an URl is reached that does not match the input root.', async(): Promise<void> => {
    const manager = new UrlContainerManager('http://test.com/foo/');
    await expect(manager.getContainer({ path: 'http://test.com/' }))
      .rejects.toThrow('URL root reached');
  });
});
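The deleted UrlContainerManager test is superseded by the PathUtil tests added further down; the equivalent call with the new helper (behaviour inferred from those tests) is simply:

import { getParentContainer } from '../../../src/util/PathUtil';

// A plain synchronous function instead of a configurable class: no base URL needed.
getParentContainer({ path: 'http://test.com/foo/bar' }); // { path: 'http://test.com/foo/' }
// getParentContainer({ path: 'http://test.com/' }); // would throw 'URL root reached'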
@ -9,10 +9,9 @@ import { ConflictHttpError } from '../../../../src/util/errors/ConflictHttpError
import { NotFoundHttpError } from '../../../../src/util/errors/NotFoundHttpError';
import type { SystemError } from '../../../../src/util/errors/SystemError';
import { UnsupportedMediaTypeHttpError } from '../../../../src/util/errors/UnsupportedMediaTypeHttpError';
import { MetadataController } from '../../../../src/util/MetadataController';
import { readableToString } from '../../../../src/util/StreamUtil';
import { CONTENT_TYPE, DCTERMS, LDP, POSIX, RDF, XSD } from '../../../../src/util/UriConstants';
import { toNamedNode, toTypedLiteral } from '../../../../src/util/UriUtil';
import { readableToString } from '../../../../src/util/Util';
import { mockFs } from '../../../util/Util';

jest.mock('fs');
@ -28,10 +27,7 @@ describe('A FileDataAccessor', (): void => {

  beforeEach(async(): Promise<void> => {
    cache = mockFs(rootFilePath, now);
    accessor = new FileDataAccessor(
      new ExtensionBasedMapper(base, rootFilePath),
      new MetadataController(),
    );
    accessor = new FileDataAccessor(new ExtensionBasedMapper(base, rootFilePath));

    metadata = new RepresentationMetadata({ [CONTENT_TYPE]: APPLICATION_OCTET_STREAM });
  });

@ -3,10 +3,9 @@ import { RepresentationMetadata } from '../../../../src/ldp/representation/Repre
import { InMemoryDataAccessor } from '../../../../src/storage/accessors/InMemoryDataAccessor';
import { APPLICATION_OCTET_STREAM } from '../../../../src/util/ContentTypes';
import { NotFoundHttpError } from '../../../../src/util/errors/NotFoundHttpError';
import { MetadataController } from '../../../../src/util/MetadataController';
import { readableToString } from '../../../../src/util/StreamUtil';
import { CONTENT_TYPE, LDP, RDF } from '../../../../src/util/UriConstants';
import { toNamedNode } from '../../../../src/util/UriUtil';
import { readableToString } from '../../../../src/util/Util';

describe('An InMemoryDataAccessor', (): void => {
  const base = 'http://test.com/';
@ -14,10 +13,7 @@ describe('An InMemoryDataAccessor', (): void => {
  let metadata: RepresentationMetadata;

  beforeEach(async(): Promise<void> => {
    accessor = new InMemoryDataAccessor(
      base,
      new MetadataController(),
    );
    accessor = new InMemoryDataAccessor(base);

    metadata = new RepresentationMetadata({ [CONTENT_TYPE]: APPLICATION_OCTET_STREAM });
  });

@ -6,13 +6,11 @@ import type { Quad } from 'rdf-js';
import streamifyArray from 'streamify-array';
import { RepresentationMetadata } from '../../../../src/ldp/representation/RepresentationMetadata';
import { SparqlDataAccessor } from '../../../../src/storage/accessors/SparqlDataAccessor';
import { UrlContainerManager } from '../../../../src/storage/UrlContainerManager';
import { INTERNAL_QUADS } from '../../../../src/util/ContentTypes';
import { ConflictHttpError } from '../../../../src/util/errors/ConflictHttpError';
import { NotFoundHttpError } from '../../../../src/util/errors/NotFoundHttpError';
import { UnsupportedHttpError } from '../../../../src/util/errors/UnsupportedHttpError';
import { UnsupportedMediaTypeHttpError } from '../../../../src/util/errors/UnsupportedMediaTypeHttpError';
import { MetadataController } from '../../../../src/util/MetadataController';
import { CONTENT_TYPE, LDP, RDF } from '../../../../src/util/UriConstants';
import { toNamedNode } from '../../../../src/util/UriUtil';

@ -60,7 +58,7 @@ describe('A SparqlDataAccessor', (): void => {
    }));

    // This needs to be last so the fetcher can be mocked first
    accessor = new SparqlDataAccessor(endpoint, base, new UrlContainerManager(base), new MetadataController());
    accessor = new SparqlDataAccessor(endpoint, base);
  });

  it('can only handle quad data.', async(): Promise<void> => {
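Outside the tests the same simplification applies; a sketch of constructing the accessor directly (endpoint and base URL are only examples, the import path is assumed):

import { SparqlDataAccessor } from '../../../../src/storage/accessors/SparqlDataAccessor';

// Only the SPARQL endpoint and base URL remain; metadata handling and container
// resolution are no longer injected as constructor arguments.
const accessor = new SparqlDataAccessor('http://localhost:4000/sparql', 'http://test.com/');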
@ -2,7 +2,7 @@ import type { Representation } from '../../../../src/ldp/representation/Represen
import { RepresentationMetadata } from '../../../../src/ldp/representation/RepresentationMetadata';
import type { RepresentationPreferences } from '../../../../src/ldp/representation/RepresentationPreferences';
import { ChainedConverter } from '../../../../src/storage/conversion/ChainedConverter';
import { checkRequest } from '../../../../src/storage/conversion/ConversionUtil';
import { validateRequestArgs } from '../../../../src/storage/conversion/ConversionUtil';
import type { RepresentationConverterArgs } from '../../../../src/storage/conversion/RepresentationConverter';
import { TypedRepresentationConverter } from '../../../../src/storage/conversion/TypedRepresentationConverter';
import { CONTENT_TYPE } from '../../../../src/util/UriConstants';
@ -26,7 +26,7 @@ class DummyConverter extends TypedRepresentationConverter {
  }

  public async canHandle(input: RepresentationConverterArgs): Promise<void> {
    checkRequest(input, Object.keys(this.inTypes), Object.keys(this.outTypes));
    validateRequestArgs(input, Object.keys(this.inTypes), Object.keys(this.outTypes));
  }

  public async handle(input: RepresentationConverterArgs): Promise<Representation> {

@ -2,11 +2,15 @@ import type { Representation } from '../../../../src/ldp/representation/Represen
import { RepresentationMetadata } from '../../../../src/ldp/representation/RepresentationMetadata';
import type { RepresentationPreferences } from '../../../../src/ldp/representation/RepresentationPreferences';
import type { ResourceIdentifier } from '../../../../src/ldp/representation/ResourceIdentifier';
import { checkRequest, matchingTypes } from '../../../../src/storage/conversion/ConversionUtil';
import {
  matchingMediaType,
  matchingTypes,
  validateRequestArgs,
} from '../../../../src/storage/conversion/ConversionUtil';
import { InternalServerError } from '../../../../src/util/errors/InternalServerError';
import { UnsupportedHttpError } from '../../../../src/util/errors/UnsupportedHttpError';

describe('A ConversionUtil', (): void => {
describe('ConversionUtil', (): void => {
  const identifier: ResourceIdentifier = { path: 'path' };
  let representation: Representation;
  let metadata: RepresentationMetadata;
@ -16,31 +20,31 @@ describe('A ConversionUtil', (): void => {
    representation = { metadata } as Representation;
  });

  describe('#checkRequest', (): void => {
  describe('#validateRequestArgs', (): void => {
    it('requires an input type.', async(): Promise<void> => {
      const preferences: RepresentationPreferences = {};
      expect((): any => checkRequest({ identifier, representation, preferences }, [ 'a/x' ], [ 'a/x' ]))
      expect((): any => validateRequestArgs({ identifier, representation, preferences }, [ 'a/x' ], [ 'a/x' ]))
        .toThrow('Input type required for conversion.');
    });

    it('requires a matching input type.', async(): Promise<void> => {
      metadata.contentType = 'a/x';
      const preferences: RepresentationPreferences = { type: [{ value: 'b/x', weight: 1 }]};
      expect((): any => checkRequest({ identifier, representation, preferences }, [ 'c/x' ], [ 'a/x' ]))
      expect((): any => validateRequestArgs({ identifier, representation, preferences }, [ 'c/x' ], [ 'a/x' ]))
        .toThrow('Can only convert from c/x to a/x.');
    });

    it('requires a matching output type.', async(): Promise<void> => {
      metadata.contentType = 'a/x';
      const preferences: RepresentationPreferences = { type: [{ value: 'b/x', weight: 1 }]};
      expect((): any => checkRequest({ identifier, representation, preferences }, [ 'a/x' ], [ 'c/x' ]))
      expect((): any => validateRequestArgs({ identifier, representation, preferences }, [ 'a/x' ], [ 'c/x' ]))
        .toThrow('Can only convert from a/x to c/x.');
    });

    it('succeeds with a valid input and output type.', async(): Promise<void> => {
      metadata.contentType = 'a/x';
      const preferences: RepresentationPreferences = { type: [{ value: 'b/x', weight: 1 }]};
      expect(checkRequest({ identifier, representation, preferences }, [ 'a/x' ], [ 'b/x' ]))
      expect(validateRequestArgs({ identifier, representation, preferences }, [ 'a/x' ], [ 'b/x' ]))
        .toBeUndefined();
    });
  });
@ -91,4 +95,18 @@ describe('A ConversionUtil', (): void => {
        .toEqual([{ value: 'a/x', weight: 1 }, { value: 'internal/quads', weight: 0.5 }]);
    });
  });

  describe('#matchingMediaType', (): void => {
    it('matches all possible media types.', async(): Promise<void> => {
      expect(matchingMediaType('*/*', 'text/turtle')).toBeTruthy();
      expect(matchingMediaType('text/*', '*/*')).toBeTruthy();
      expect(matchingMediaType('text/*', 'text/turtle')).toBeTruthy();
      expect(matchingMediaType('text/plain', 'text/*')).toBeTruthy();
      expect(matchingMediaType('text/turtle', 'text/turtle')).toBeTruthy();

      expect(matchingMediaType('text/*', 'application/*')).toBeFalsy();
      expect(matchingMediaType('text/plain', 'application/*')).toBeFalsy();
      expect(matchingMediaType('text/plain', 'text/turtle')).toBeFalsy();
    });
  });
});
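A sketch of how a converter is expected to use the renamed helpers, mirroring the DummyConverter change above (nothing here goes beyond the API shown in the diff; the standalone function shape is only for illustration):

import { matchingMediaType, validateRequestArgs } from '../../../../src/storage/conversion/ConversionUtil';
import type { RepresentationConverterArgs } from '../../../../src/storage/conversion/RepresentationConverter';

// canHandle delegates to the renamed validation helper.
async function canHandle(input: RepresentationConverterArgs, inTypes: string[], outTypes: string[]): Promise<void> {
  validateRequestArgs(input, inTypes, outTypes);
}

// matchingMediaType is now exported on its own for wildcard checks.
const ok = matchingMediaType('text/*', 'text/turtle'); // true
const nope = matchingMediaType('text/plain', 'text/turtle'); // false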
@ -1,4 +1,6 @@
import type { HttpResponse } from '../../../src/server/HttpResponse';
import {
  addHeader,
  parseAccept,
  parseAcceptCharset,
  parseAcceptEncoding,
@ -6,7 +8,7 @@ import {
} from '../../../src/util/HeaderUtil';

describe('HeaderUtil', (): void => {
  describe('parseAccept function', (): void => {
  describe('#parseAccept', (): void => {
    it('parses empty Accept headers.', async(): Promise<void> => {
      expect(parseAccept('')).toEqual([]);
    });
@ -71,7 +73,7 @@ describe('HeaderUtil', (): void => {
    });
  });

  describe('parseCharset function', (): void => {
  describe('#parseCharset', (): void => {
    it('parses Accept-Charset headers.', async(): Promise<void> => {
      expect(parseAcceptCharset('iso-8859-5, unicode-1-1;q=0.8')).toEqual([
        { range: 'iso-8859-5', weight: 1 },
@ -86,7 +88,7 @@ describe('HeaderUtil', (): void => {
    });
  });

  describe('parseEncoding function', (): void => {
  describe('#parseEncoding', (): void => {
    it('parses empty Accept-Encoding headers.', async(): Promise<void> => {
      expect(parseAcceptCharset('')).toEqual([]);
    });
@ -106,7 +108,7 @@ describe('HeaderUtil', (): void => {
    });
  });

  describe('parseLanguage function', (): void => {
  describe('#parseLanguage', (): void => {
    it('parses Accept-Language headers.', async(): Promise<void> => {
      expect(parseAcceptLanguage('da, en-gb;q=0.8, en;q=0.7')).toEqual([
        { range: 'da', weight: 1 },
@ -127,4 +129,41 @@ describe('HeaderUtil', (): void => {
      expect((): any => parseAcceptCharset('a; c=d')).toThrow('Only q parameters are allowed');
    });
  });

  describe('#addHeader', (): void => {
    let response: HttpResponse;

    beforeEach(async(): Promise<void> => {
      const headers: Record<string, string | number | string[]> = {};
      response = {
        hasHeader: (name: string): boolean => Boolean(headers[name]),
        getHeader: (name: string): number | string | string[] | undefined => headers[name],
        setHeader(name: string, value: number | string | string[]): void {
          headers[name] = value;
        },
      } as any;
    });

    it('adds values if there are none already.', async(): Promise<void> => {
      expect(addHeader(response, 'name', 'value')).toBeUndefined();
      expect(response.getHeader('name')).toBe('value');

      expect(addHeader(response, 'names', [ 'value1', 'values2' ])).toBeUndefined();
      expect(response.getHeader('names')).toEqual([ 'value1', 'values2' ]);
    });

    it('appends values to already existing values.', async(): Promise<void> => {
      response.setHeader('name', 'oldValue');
      expect(addHeader(response, 'name', 'value')).toBeUndefined();
      expect(response.getHeader('name')).toEqual([ 'oldValue', 'value' ]);

      response.setHeader('number', 5);
      expect(addHeader(response, 'number', 'value')).toBeUndefined();
      expect(response.getHeader('number')).toEqual([ '5', 'value' ]);

      response.setHeader('names', [ 'oldValue1', 'oldValue2' ]);
      expect(addHeader(response, 'names', [ 'value1', 'values2' ])).toBeUndefined();
      expect(response.getHeader('names')).toEqual([ 'oldValue1', 'oldValue2', 'value1', 'values2' ]);
    });
  });
});
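addHeader is exercised above against a stubbed response; with a real Node response the behaviour should be the same, as in this sketch (the Link values and port are arbitrary examples):

import { createServer } from 'http';
import { addHeader } from '../../../src/util/HeaderUtil';

createServer((req, res): void => {
  // A second call appends rather than overwrites, turning the header value into an array.
  addHeader(res, 'Link', '<http://www.w3.org/ns/ldp#Resource>; rel="type"');
  addHeader(res, 'Link', '<http://www.w3.org/ns/ldp#BasicContainer>; rel="type"');
  res.end();
}).listen(3000);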
test/unit/util/PathUtil.test.ts (new file, 43 lines)
@ -0,0 +1,43 @@
import {
  decodeUriPathComponents,
  encodeUriPathComponents,
  ensureTrailingSlash,
  getParentContainer,
  toCanonicalUriPath,
} from '../../../src/util/PathUtil';

describe('PathUtil', (): void => {
  describe('#ensureTrailingSlash', (): void => {
    it('makes sure there is always exactly 1 slash.', async(): Promise<void> => {
      expect(ensureTrailingSlash('http://test.com')).toEqual('http://test.com/');
      expect(ensureTrailingSlash('http://test.com/')).toEqual('http://test.com/');
      expect(ensureTrailingSlash('http://test.com//')).toEqual('http://test.com/');
      expect(ensureTrailingSlash('http://test.com///')).toEqual('http://test.com/');
    });
  });

  describe('UriPath functions', (): void => {
    it('makes sure only the necessary parts are encoded with toCanonicalUriPath.', async(): Promise<void> => {
      expect(toCanonicalUriPath('/a%20path&/name')).toEqual('/a%20path%26/name');
    });

    it('decodes all parts of a path with decodeUriPathComponents.', async(): Promise<void> => {
      expect(decodeUriPathComponents('/a%20path&/name')).toEqual('/a path&/name');
    });

    it('encodes all parts of a path with encodeUriPathComponents.', async(): Promise<void> => {
      expect(encodeUriPathComponents('/a%20path&/name')).toEqual('/a%2520path%26/name');
    });
  });

  describe('#getParentContainer', (): void => {
    it('returns the parent URl for a single call.', async(): Promise<void> => {
      expect(getParentContainer({ path: 'http://test.com/foo/bar' })).toEqual({ path: 'http://test.com/foo/' });
      expect(getParentContainer({ path: 'http://test.com/foo/bar/' })).toEqual({ path: 'http://test.com/foo/' });
    });

    it('errors when the root of an URl is reached that does not match the input root.', async(): Promise<void> => {
      expect((): any => getParentContainer({ path: 'http://test.com/' })).toThrow('URL root reached');
    });
  });
});
test/unit/util/QuadUtil.test.ts (new file, 15 lines)
@ -0,0 +1,15 @@
import { DataFactory } from 'n3';
import type { Quad } from 'rdf-js';
import { pushQuad } from '../../../src/util/QuadUtil';

describe('QuadUtil', (): void => {
  describe('#pushQuad', (): void => {
    it('creates a quad and adds it to the given array.', async(): Promise<void> => {
      const quads: Quad[] = [];
      pushQuad(quads, DataFactory.namedNode('sub'), DataFactory.namedNode('pred'), DataFactory.literal('obj'));
      expect(quads).toEqualRdfQuadArray([
        DataFactory.quad(DataFactory.namedNode('sub'), DataFactory.namedNode('pred'), DataFactory.literal('obj')),
      ]);
    });
  });
});
test/unit/util/StreamUtil.test.ts (new file, 43 lines)
@ -0,0 +1,43 @@
import { PassThrough } from 'stream';
import streamifyArray from 'streamify-array';
import { pipeSafely, readableToString } from '../../../src/util/StreamUtil';

describe('StreamUtil', (): void => {
  describe('#readableToString', (): void => {
    it('concatenates all elements of a Readable.', async(): Promise<void> => {
      const stream = streamifyArray([ 'a', 'b', 'c' ]);
      await expect(readableToString(stream)).resolves.toEqual('abc');
    });
  });

  describe('#pipeSafely', (): void => {
    it('pipes data from one stream to the other.', async(): Promise<void> => {
      const input = streamifyArray([ 'data' ]);
      const output = new PassThrough();
      const piped = pipeSafely(input, output);
      await expect(readableToString(piped)).resolves.toEqual('data');
    });

    it('pipes errors from one stream to the other.', async(): Promise<void> => {
      const input = new PassThrough();
      input.read = (): any => {
        input.emit('error', new Error('error'));
        return null;
      };
      const output = new PassThrough();
      const piped = pipeSafely(input, output);
      await expect(readableToString(piped)).rejects.toThrow(new Error('error'));
    });

    it('supports mapping errors to something else.', async(): Promise<void> => {
      const input = streamifyArray([ 'data' ]);
      input.read = (): any => {
        input.emit('error', new Error('error'));
        return null;
      };
      const output = new PassThrough();
      const piped = pipeSafely(input, output, (): any => new Error('other error'));
      await expect(readableToString(piped)).rejects.toThrow(new Error('other error'));
    });
  });
});
@ -1,138 +0,0 @@
import { PassThrough } from 'stream';
import { DataFactory } from 'n3';
import type { Quad } from 'rdf-js';
import streamifyArray from 'streamify-array';
import type { HttpResponse } from '../../../src/server/HttpResponse';
import {
  addHeader,
  decodeUriPathComponents,
  encodeUriPathComponents,
  ensureTrailingSlash,
  matchingMediaType, pipeSafe, pushQuad,
  readableToString,
  toCanonicalUriPath,
} from '../../../src/util/Util';

describe('Util function', (): void => {
  describe('ensureTrailingSlash', (): void => {
    it('makes sure there is always exactly 1 slash.', async(): Promise<void> => {
      expect(ensureTrailingSlash('http://test.com')).toEqual('http://test.com/');
      expect(ensureTrailingSlash('http://test.com/')).toEqual('http://test.com/');
      expect(ensureTrailingSlash('http://test.com//')).toEqual('http://test.com/');
      expect(ensureTrailingSlash('http://test.com///')).toEqual('http://test.com/');
    });
  });

  describe('readableToString', (): void => {
    it('concatenates all elements of a Readable.', async(): Promise<void> => {
      const stream = streamifyArray([ 'a', 'b', 'c' ]);
      await expect(readableToString(stream)).resolves.toEqual('abc');
    });
  });

  describe('matchingMediaType', (): void => {
    it('matches all possible media types.', async(): Promise<void> => {
      expect(matchingMediaType('*/*', 'text/turtle')).toBeTruthy();
      expect(matchingMediaType('text/*', '*/*')).toBeTruthy();
      expect(matchingMediaType('text/*', 'text/turtle')).toBeTruthy();
      expect(matchingMediaType('text/plain', 'text/*')).toBeTruthy();
      expect(matchingMediaType('text/turtle', 'text/turtle')).toBeTruthy();

      expect(matchingMediaType('text/*', 'application/*')).toBeFalsy();
      expect(matchingMediaType('text/plain', 'application/*')).toBeFalsy();
      expect(matchingMediaType('text/plain', 'text/turtle')).toBeFalsy();
    });
  });

  describe('pipeStreamsAndErrors', (): void => {
    it('pipes data from one stream to the other.', async(): Promise<void> => {
      const input = streamifyArray([ 'data' ]);
      const output = new PassThrough();
      const piped = pipeSafe(input, output);
      await expect(readableToString(piped)).resolves.toEqual('data');
    });

    it('pipes errors from one stream to the other.', async(): Promise<void> => {
      const input = new PassThrough();
      input.read = (): any => {
        input.emit('error', new Error('error'));
        return null;
      };
      const output = new PassThrough();
      const piped = pipeSafe(input, output);
      await expect(readableToString(piped)).rejects.toThrow(new Error('error'));
    });

    it('supports mapping errors to something else.', async(): Promise<void> => {
      const input = streamifyArray([ 'data' ]);
      input.read = (): any => {
        input.emit('error', new Error('error'));
        return null;
      };
      const output = new PassThrough();
      const piped = pipeSafe(input, output, (): any => new Error('other error'));
      await expect(readableToString(piped)).rejects.toThrow(new Error('other error'));
    });
  });

  describe('UriPath functions', (): void => {
    it('makes sure only the necessary parts are encoded with toCanonicalUriPath.', async(): Promise<void> => {
      expect(toCanonicalUriPath('/a%20path&/name')).toEqual('/a%20path%26/name');
    });

    it('decodes all parts of a path with decodeUriPathComponents.', async(): Promise<void> => {
      expect(decodeUriPathComponents('/a%20path&/name')).toEqual('/a path&/name');
    });

    it('encodes all parts of a path with encodeUriPathComponents.', async(): Promise<void> => {
      expect(encodeUriPathComponents('/a%20path&/name')).toEqual('/a%2520path%26/name');
    });
  });

  describe('pushQuad', (): void => {
    it('creates a quad and adds it to the given array.', async(): Promise<void> => {
      const quads: Quad[] = [];
      pushQuad(quads, DataFactory.namedNode('sub'), DataFactory.namedNode('pred'), DataFactory.literal('obj'));
      expect(quads).toEqualRdfQuadArray([
        DataFactory.quad(DataFactory.namedNode('sub'), DataFactory.namedNode('pred'), DataFactory.literal('obj')),
      ]);
    });
  });

  describe('addHeader', (): void => {
    let response: HttpResponse;

    beforeEach(async(): Promise<void> => {
      const headers: Record<string, string | number | string[]> = {};
      response = {
        hasHeader: (name: string): boolean => Boolean(headers[name]),
        getHeader: (name: string): number | string | string[] | undefined => headers[name],
        setHeader(name: string, value: number | string | string[]): void {
          headers[name] = value;
        },
      } as any;
    });

    it('adds values if there are none already.', async(): Promise<void> => {
      expect(addHeader(response, 'name', 'value')).toBeUndefined();
      expect(response.getHeader('name')).toBe('value');

      expect(addHeader(response, 'names', [ 'value1', 'values2' ])).toBeUndefined();
      expect(response.getHeader('names')).toEqual([ 'value1', 'values2' ]);
    });

    it('appends values to already existing values.', async(): Promise<void> => {
      response.setHeader('name', 'oldValue');
      expect(addHeader(response, 'name', 'value')).toBeUndefined();
      expect(response.getHeader('name')).toEqual([ 'oldValue', 'value' ]);

      response.setHeader('number', 5);
      expect(addHeader(response, 'number', 'value')).toBeUndefined();
      expect(response.getHeader('number')).toEqual([ '5', 'value' ]);

      response.setHeader('names', [ 'oldValue1', 'oldValue2' ]);
      expect(addHeader(response, 'names', [ 'value1', 'values2' ])).toBeUndefined();
      expect(response.getHeader('names')).toEqual([ 'oldValue1', 'oldValue2', 'value1', 'values2' ]);
    });
  });
});