diff --git a/RELEASE_NOTES.md b/RELEASE_NOTES.md index 505e14ab9..3b88c3406 100644 --- a/RELEASE_NOTES.md +++ b/RELEASE_NOTES.md @@ -4,6 +4,7 @@ ### New features - The Identity Provider now uses the `webid` scope as required for Solid-OIDC. - The `VoidLocker` can be used to disable locking for development/testing purposes. This can be enabled by changing the `/config/util/resource-locker/` import to `debug-void.json` +- Added support for setting a quota on the server. See the `config/quota-file.json` config for an example. ### Configuration changes You might need to make changes to your v2 configuration if you use a custom config. diff --git a/config/ldp/metadata-parser/default.json b/config/ldp/metadata-parser/default.json index bd9449c6a..e782bfd4b 100644 --- a/config/ldp/metadata-parser/default.json +++ b/config/ldp/metadata-parser/default.json @@ -2,6 +2,7 @@ "@context": "https://linkedsoftwaredependencies.org/bundles/npm/@solid/community-server/^2.0.0/components/context.jsonld", "import": [ "files-scs:config/ldp/metadata-parser/parsers/content-type.json", + "files-scs:config/ldp/metadata-parser/parsers/content-length.json", "files-scs:config/ldp/metadata-parser/parsers/slug.json", "files-scs:config/ldp/metadata-parser/parsers/link.json" ], @@ -12,6 +13,7 @@ "@type": "ParallelHandler", "handlers": [ { "@id": "urn:solid-server:default:ContentTypeParser" }, + { "@id": "urn:solid-server:default:ContentLengthParser" }, { "@id": "urn:solid-server:default:SlugParser" }, { "@id": "urn:solid-server:default:LinkRelParser" } ] diff --git a/config/ldp/metadata-parser/parsers/content-length.json b/config/ldp/metadata-parser/parsers/content-length.json new file mode 100644 index 000000000..1ec1a2311 --- /dev/null +++ b/config/ldp/metadata-parser/parsers/content-length.json @@ -0,0 +1,10 @@ +{ + "@context": "https://linkedsoftwaredependencies.org/bundles/npm/@solid/community-server/^2.0.0/components/context.jsonld", + "@graph": [ + { + "comment": "Converts content-length 
headers into RDF metadata.", + "@id": "urn:solid-server:default:ContentLengthParser", + "@type": "ContentLengthParser" + } + ] +} diff --git a/config/quota-file.json b/config/quota-file.json new file mode 100644 index 000000000..d551f8118 --- /dev/null +++ b/config/quota-file.json @@ -0,0 +1,48 @@ +{ + "@context": "https://linkedsoftwaredependencies.org/bundles/npm/@solid/community-server/^2.0.0/components/context.jsonld", + "import": [ + "files-scs:config/app/main/default.json", + "files-scs:config/app/init/default.json", + "files-scs:config/app/setup/required.json", + "files-scs:config/http/handler/default.json", + "files-scs:config/http/middleware/websockets.json", + "files-scs:config/http/server-factory/websockets.json", + "files-scs:config/http/static/default.json", + "files-scs:config/identity/access/public.json", + "files-scs:config/identity/email/default.json", + "files-scs:config/identity/handler/default.json", + "files-scs:config/identity/ownership/token.json", + "files-scs:config/identity/pod/static.json", + "files-scs:config/identity/registration/enabled.json", + "files-scs:config/ldp/authentication/dpop-bearer.json", + "files-scs:config/ldp/authorization/allow-all.json", + "files-scs:config/ldp/handler/default.json", + "files-scs:config/ldp/metadata-parser/default.json", + "files-scs:config/ldp/metadata-writer/default.json", + "files-scs:config/ldp/modes/default.json", + "files-scs:config/storage/backend/pod-quota-file.json", + "files-scs:config/storage/key-value/resource-store.json", + "files-scs:config/storage/middleware/default.json", + "files-scs:config/util/auxiliary/acl.json", + "files-scs:config/util/identifiers/suffix.json", + "files-scs:config/util/index/default.json", + "files-scs:config/util/logging/winston.json", + "files-scs:config/util/representation-conversion/default.json", + "files-scs:config/util/resource-locker/memory.json", + "files-scs:config/util/variables/default.json" + ], + "@graph": [ + { + "comment": "A server that stores its 
resources on disk while enforcing quota." + }, + { + "@id": "urn:solid-server:default:QuotaStrategy", + "PodQuotaStrategy:_limit_amount": 7000, + "PodQuotaStrategy:_limit_unit": "bytes" + }, + { + "@id": "urn:solid-server:default:SizeReporter", + "FileSizeReporter:_ignoreFolders": [ "^/\\.internal$" ] + } + ] +} diff --git a/config/storage/README.md b/config/storage/README.md index 52e679bfb..9626d2f6a 100644 --- a/config/storage/README.md +++ b/config/storage/README.md @@ -5,7 +5,9 @@ Options related to how data and resources are stored. The final part of the ResourceStore chain that handles data access. * *dynamic*: The routing store used here is needed when using dynamic pod creation. * *file*: Default setup with a file backend. +* *global-quota-file*: File backend with a global quota over the entire server. * *memory*: Default setup with a memory backend. +* *pod-quota-file*: File backend with a max quota per pod. * *regex*: Uses a different backend based on the container that is being used. * *sparql*: Default setup with a SPARQL endpoint backend. Also updates the converting store so all incoming data is transformed into quads. 
diff --git a/config/storage/backend/global-quota-file.json b/config/storage/backend/global-quota-file.json new file mode 100644 index 000000000..ea40bfc6d --- /dev/null +++ b/config/storage/backend/global-quota-file.json @@ -0,0 +1,17 @@ +{ + "@context": "https://linkedsoftwaredependencies.org/bundles/npm/@solid/community-server/^2.0.0/components/context.jsonld", + "import": [ + "files-scs:config/storage/backend/quota/global-quota-file.json", + "files-scs:config/storage/backend/quota/quota-file.json" + ], + "@graph": [ + { + "comment": "A global quota store setup with a file system backend.", + "@id": "urn:solid-server:default:ResourceStore_Backend", + "@type": "DataAccessorBasedStore", + "identifierStrategy": { "@id": "urn:solid-server:default:IdentifierStrategy" }, + "auxiliaryStrategy": { "@id": "urn:solid-server:default:AuxiliaryStrategy" }, + "accessor": { "@id": "urn:solid-server:default:FileDataAccessor" } + } + ] +} diff --git a/config/storage/backend/pod-quota-file.json b/config/storage/backend/pod-quota-file.json new file mode 100644 index 000000000..00da72df5 --- /dev/null +++ b/config/storage/backend/pod-quota-file.json @@ -0,0 +1,17 @@ +{ + "@context": "https://linkedsoftwaredependencies.org/bundles/npm/@solid/community-server/^2.0.0/components/context.jsonld", + "import": [ + "files-scs:config/storage/backend/quota/pod-quota-file.json", + "files-scs:config/storage/backend/quota/quota-file.json" + ], + "@graph": [ + { + "comment": "A pod quota store setup with a file system backend.", + "@id": "urn:solid-server:default:ResourceStore_Backend", + "@type": "DataAccessorBasedStore", + "identifierStrategy": { "@id": "urn:solid-server:default:IdentifierStrategy" }, + "auxiliaryStrategy": { "@id": "urn:solid-server:default:AuxiliaryStrategy" }, + "accessor": { "@id": "urn:solid-server:default:FileDataAccessor" } + } + ] +} diff --git a/config/storage/backend/quota/global-quota-file.json b/config/storage/backend/quota/global-quota-file.json new file mode 
100644 index 000000000..d61cd8151 --- /dev/null +++ b/config/storage/backend/quota/global-quota-file.json @@ -0,0 +1,13 @@ +{ + "@context": "https://linkedsoftwaredependencies.org/bundles/npm/@solid/community-server/^2.0.0/components/context.jsonld", + "comment": "Configuration of a GlobalQuotaStrategy to enforce quota globally on the server.", + "@graph": [ + { + "comment": "Enforces quota globally for all data on the server", + "@id": "urn:solid-server:default:QuotaStrategy", + "@type": "GlobalQuotaStrategy", + "reporter": { "@id": "urn:solid-server:default:SizeReporter" }, + "base": { "@id": "urn:solid-server:default:variable:baseUrl" } + } + ] +} diff --git a/config/storage/backend/quota/pod-quota-file.json b/config/storage/backend/quota/pod-quota-file.json new file mode 100644 index 000000000..f36529c60 --- /dev/null +++ b/config/storage/backend/quota/pod-quota-file.json @@ -0,0 +1,14 @@ +{ + "@context": "https://linkedsoftwaredependencies.org/bundles/npm/@solid/community-server/^2.0.0/components/context.jsonld", + "comment": "Configuration of a PodQuotaStrategy to enforce pod quotas on the server.", + "@graph": [ + { + "comment": "Enforces quota for all data per pod on the server", + "@id": "urn:solid-server:default:QuotaStrategy", + "@type": "PodQuotaStrategy", + "reporter": { "@id": "urn:solid-server:default:SizeReporter" }, + "accessor": { "@id": "urn:solid-server:default:AtomicFileDataAccessor" }, + "identifierStrategy": { "@id": "urn:solid-server:default:IdentifierStrategy" } + } + ] +} diff --git a/config/storage/backend/quota/quota-file.json b/config/storage/backend/quota/quota-file.json new file mode 100644 index 000000000..03516a5e0 --- /dev/null +++ b/config/storage/backend/quota/quota-file.json @@ -0,0 +1,37 @@ +{ + "@context": "https://linkedsoftwaredependencies.org/bundles/npm/@solid/community-server/^2.0.0/components/context.jsonld", + "comment": "DataAccessor configuration using a QuotaStrategy to enforce quota on the server.", + "@graph": [ + 
{ + "comment": "DataAccessor that writes data to the disk with atomicity in mind", + "@id": "urn:solid-server:default:AtomicFileDataAccessor", + "@type": "AtomicFileDataAccessor", + "resourceMapper": { "@id": "urn:solid-server:default:FileIdentifierMapper" }, + "rootFilePath": { "@id": "urn:solid-server:default:variable:rootFilePath" }, + "tempFilePath": "/.internal/tempFiles/" + }, + + { + "comment": "Calculates the space already taken up by a resource", + "@id": "urn:solid-server:default:SizeReporter", + "@type": "FileSizeReporter", + "fileIdentifierMapper": { "@id": "urn:solid-server:default:FileIdentifierMapper" }, + "rootFilePath": { "@id": "urn:solid-server:default:variable:rootFilePath" } + }, + + { + "comment": "Validates the data being written to the server", + "@id": "urn:solid-server:default:QuotaValidator", + "@type": "QuotaValidator", + "strategy": { "@id": "urn:solid-server:default:QuotaStrategy" } + }, + + { + "comment": "Simple wrapper for another DataAccessor but adds validation", + "@id": "urn:solid-server:default:FileDataAccessor", + "@type": "ValidatingDataAccessor", + "accessor": { "@id": "urn:solid-server:default:AtomicFileDataAccessor" }, + "validator": { "@id": "urn:solid-server:default:QuotaValidator" } + } + ] +} diff --git a/src/http/auxiliary/ComposedAuxiliaryStrategy.ts b/src/http/auxiliary/ComposedAuxiliaryStrategy.ts index 25f576b06..8fe47ab3e 100644 --- a/src/http/auxiliary/ComposedAuxiliaryStrategy.ts +++ b/src/http/auxiliary/ComposedAuxiliaryStrategy.ts @@ -58,7 +58,10 @@ export class ComposedAuxiliaryStrategy implements AuxiliaryStrategy { public async validate(representation: Representation): Promise { if (this.validator) { - return this.validator.handleSafe(representation); + await this.validator.handleSafe({ + representation, + identifier: { path: representation.metadata.identifier.value }, + }); } } } diff --git a/src/http/auxiliary/RdfValidator.ts b/src/http/auxiliary/RdfValidator.ts index a9fc56eea..e2a4a72cb 100644 --- 
a/src/http/auxiliary/RdfValidator.ts +++ b/src/http/auxiliary/RdfValidator.ts @@ -3,6 +3,7 @@ import type { RepresentationConverter } from '../../storage/conversion/Represent import { INTERNAL_QUADS } from '../../util/ContentTypes'; import { cloneRepresentation } from '../../util/ResourceUtil'; import type { Representation } from '../representation/Representation'; +import type { ValidatorInput } from './Validator'; import { Validator } from './Validator'; /** @@ -17,12 +18,11 @@ export class RdfValidator extends Validator { this.converter = converter; } - public async handle(representation: Representation): Promise { + public async handle({ representation, identifier }: ValidatorInput): Promise { // If the data already is quads format we know it's RDF if (representation.metadata.contentType === INTERNAL_QUADS) { - return; + return representation; } - const identifier = { path: representation.metadata.identifier.value }; const preferences = { type: { [INTERNAL_QUADS]: 1 }}; let result; try { @@ -39,5 +39,7 @@ export class RdfValidator extends Validator { } // Drain stream to make sure data was parsed correctly await arrayifyStream(result.data); + + return representation; } } diff --git a/src/http/auxiliary/Validator.ts b/src/http/auxiliary/Validator.ts index 38a83f3d8..974cb4555 100644 --- a/src/http/auxiliary/Validator.ts +++ b/src/http/auxiliary/Validator.ts @@ -1,7 +1,13 @@ import { AsyncHandler } from '../../util/handlers/AsyncHandler'; import type { Representation } from '../representation/Representation'; +import type { ResourceIdentifier } from '../representation/ResourceIdentifier'; + +export type ValidatorInput = { + representation: Representation; + identifier: ResourceIdentifier; +}; /** * Generic interface for classes that validate Representations in some way. 
*/ -export abstract class Validator extends AsyncHandler { } +export abstract class Validator extends AsyncHandler { } diff --git a/src/http/input/metadata/ContentLengthParser.ts b/src/http/input/metadata/ContentLengthParser.ts new file mode 100644 index 000000000..a0cf84954 --- /dev/null +++ b/src/http/input/metadata/ContentLengthParser.ts @@ -0,0 +1,23 @@ +import { getLoggerFor } from '../../../logging/LogUtil'; +import type { HttpRequest } from '../../../server/HttpRequest'; +import type { RepresentationMetadata } from '../../representation/RepresentationMetadata'; +import { MetadataParser } from './MetadataParser'; + +/** + * Parser for the `content-length` header. + */ +export class ContentLengthParser extends MetadataParser { + protected readonly logger = getLoggerFor(this); + + public async handle(input: { request: HttpRequest; metadata: RepresentationMetadata }): Promise { + const contentLength = input.request.headers['content-length']; + if (contentLength) { + const length = /^\s*(\d+)\s*(?:;.*)?$/u.exec(contentLength)?.[1]; + if (length) { + input.metadata.contentLength = Number(length); + } else { + this.logger.warn(`Invalid content-length header found: ${contentLength}.`); + } + } + } +} diff --git a/src/http/representation/RepresentationMetadata.ts b/src/http/representation/RepresentationMetadata.ts index a998c5950..2cb3402a3 100644 --- a/src/http/representation/RepresentationMetadata.ts +++ b/src/http/representation/RepresentationMetadata.ts @@ -2,8 +2,8 @@ import { DataFactory, Store } from 'n3'; import type { BlankNode, DefaultGraph, Literal, NamedNode, Quad, Term } from 'rdf-js'; import { getLoggerFor } from '../../logging/LogUtil'; import { InternalServerError } from '../../util/errors/InternalServerError'; -import { toNamedTerm, toObjectTerm, toCachedNamedNode, isTerm } from '../../util/TermUtil'; -import { CONTENT_TYPE, CONTENT_TYPE_TERM } from '../../util/Vocabularies'; +import { toNamedTerm, toObjectTerm, toCachedNamedNode, isTerm, toLiteral } 
from '../../util/TermUtil'; +import { CONTENT_TYPE, CONTENT_TYPE_TERM, CONTENT_LENGTH_TERM, XSD } from '../../util/Vocabularies'; import type { ResourceIdentifier } from './ResourceIdentifier'; import { isResourceIdentifier } from './ResourceIdentifier'; @@ -316,4 +316,18 @@ export class RepresentationMetadata { public set contentType(input) { this.set(CONTENT_TYPE_TERM, input); } + + /** + * Shorthand for the CONTENT_LENGTH predicate. + */ + public get contentLength(): number | undefined { + const length = this.get(CONTENT_LENGTH_TERM); + return length?.value ? Number(length.value) : undefined; + } + + public set contentLength(input) { + if (input) { + this.set(CONTENT_LENGTH_TERM, toLiteral(input, XSD.terms.integer)); + } + } } diff --git a/src/index.ts b/src/index.ts index 6e77d9807..964d4e2a7 100644 --- a/src/index.ts +++ b/src/index.ts @@ -21,10 +21,10 @@ export * from './authorization/permissions/MethodModesExtractor'; export * from './authorization/permissions/SparqlPatchModesExtractor'; // Authorization -export * from './authorization/OwnerPermissionReader'; export * from './authorization/AllStaticReader'; export * from './authorization/Authorizer'; export * from './authorization/AuxiliaryReader'; +export * from './authorization/OwnerPermissionReader'; export * from './authorization/PathBasedReader'; export * from './authorization/PermissionBasedAuthorizer'; export * from './authorization/PermissionReader'; @@ -57,6 +57,7 @@ export * from './http/input/identifier/OriginalUrlExtractor'; export * from './http/input/identifier/TargetExtractor'; // HTTP/Input/Metadata +export * from './http/input/metadata/ContentLengthParser'; export * from './http/input/metadata/ContentTypeParser'; export * from './http/input/metadata/LinkRelParser'; export * from './http/input/metadata/MetadataParser'; @@ -248,10 +249,14 @@ export * from './server/util/RedirectAllHttpHandler'; export * from './server/util/RouterHandler'; // Storage/Accessors +export * from 
'./storage/accessors/AtomicDataAccessor'; +export * from './storage/accessors/AtomicFileDataAccessor'; export * from './storage/accessors/DataAccessor'; export * from './storage/accessors/FileDataAccessor'; export * from './storage/accessors/InMemoryDataAccessor'; +export * from './storage/accessors/PassthroughDataAccessor'; export * from './storage/accessors/SparqlDataAccessor'; +export * from './storage/accessors/ValidatingDataAccessor'; // Storage/Conversion export * from './storage/conversion/BaseTypedRepresentationConverter'; @@ -295,6 +300,11 @@ export * from './storage/patch/RepresentationPatcher'; export * from './storage/patch/RepresentationPatchHandler'; export * from './storage/patch/SparqlUpdatePatcher'; +// Storage/Quota +export * from './storage/quota/GlobalQuotaStrategy'; +export * from './storage/quota/PodQuotaStrategy'; +export * from './storage/quota/QuotaStrategy'; + // Storage/Routing export * from './storage/routing/BaseUrlRouterRule'; export * from './storage/routing/ConvertingRouterRule'; @@ -302,6 +312,14 @@ export * from './storage/routing/PreferenceSupport'; export * from './storage/routing/RegexRouterRule'; export * from './storage/routing/RouterRule'; +// Storage/Size-Reporter +export * from './storage/size-reporter/FileSizeReporter'; +export * from './storage/size-reporter/Size'; +export * from './storage/size-reporter/SizeReporter'; + +// Storage/Validators +export * from './storage/validators/QuotaValidator'; + // Storage export * from './storage/AtomicResourceStore'; export * from './storage/BaseResourceStore'; diff --git a/src/storage/accessors/AtomicDataAccessor.ts b/src/storage/accessors/AtomicDataAccessor.ts new file mode 100644 index 000000000..3184167ec --- /dev/null +++ b/src/storage/accessors/AtomicDataAccessor.ts @@ -0,0 +1,10 @@ +import type { DataAccessor } from './DataAccessor'; + +/** + * The AtomicDataAccessor interface has identical function signatures as + * the DataAccessor, with the additional constraint that every 
function call + * must be atomic in its effect: either the call fully succeeds, reaching the + * desired new state; or it fails, upon which the resulting state remains + * identical to the one before the call. + */ +export interface AtomicDataAccessor extends DataAccessor { } diff --git a/src/storage/accessors/AtomicFileDataAccessor.ts b/src/storage/accessors/AtomicFileDataAccessor.ts new file mode 100644 index 000000000..6eb5f4ac5 --- /dev/null +++ b/src/storage/accessors/AtomicFileDataAccessor.ts @@ -0,0 +1,62 @@ +import { mkdirSync, promises as fsPromises } from 'fs'; +import type { Readable } from 'stream'; +import { v4 } from 'uuid'; +import type { RepresentationMetadata } from '../../http/representation/RepresentationMetadata'; +import type { ResourceIdentifier } from '../../http/representation/ResourceIdentifier'; +import type { Guarded } from '../../util/GuardedStream'; +import { joinFilePath } from '../../util/PathUtil'; +import type { FileIdentifierMapper } from '../mapping/FileIdentifierMapper'; +import type { AtomicDataAccessor } from './AtomicDataAccessor'; +import { FileDataAccessor } from './FileDataAccessor'; + +/** + * AtomicDataAccessor that uses the file system to store documents as files and containers as folders. + * Data will first be written to a temporary location and only if no errors occur + * will the data be written to the desired location. + */ +export class AtomicFileDataAccessor extends FileDataAccessor implements AtomicDataAccessor { + private readonly tempFilePath: string; + + public constructor(resourceMapper: FileIdentifierMapper, rootFilePath: string, tempFilePath: string) { + super(resourceMapper); + this.tempFilePath = joinFilePath(rootFilePath, tempFilePath); + // Cannot use fsPromises in constructor + mkdirSync(this.tempFilePath, { recursive: true }); + } + + /** + * Writes the given data as a file (and potential metadata as additional file). 
+ * Data will first be written to a temporary file and if no errors occur only then the + * file will be moved to desired destination. + * If the stream errors it is made sure the temporary file will be deleted. + * The metadata file will only be written if the data was written successfully. + */ + public async writeDocument(identifier: ResourceIdentifier, data: Guarded, metadata: RepresentationMetadata): + Promise { + const link = await this.resourceMapper.mapUrlToFilePath(identifier, false, metadata.contentType); + + // Generate temporary file name + const tempFilePath = joinFilePath(this.tempFilePath, `temp-${v4()}.txt`); + + try { + await this.writeDataFile(tempFilePath, data); + + // Check if we already have a corresponding file with a different extension + await this.verifyExistingExtension(link); + + // When no quota errors occur move the file to its desired location + await fsPromises.rename(tempFilePath, link.filePath); + } catch (error: unknown) { + // Delete the data already written + try { + if ((await this.getStats(tempFilePath)).isFile()) { + await fsPromises.unlink(tempFilePath); + } + } catch { + throw error; + } + throw error; + } + await this.writeMetadata(link, metadata); + } +} diff --git a/src/storage/accessors/FileDataAccessor.ts b/src/storage/accessors/FileDataAccessor.ts index 9efde8d3b..fd6cebc56 100644 --- a/src/storage/accessors/FileDataAccessor.ts +++ b/src/storage/accessors/FileDataAccessor.ts @@ -22,7 +22,7 @@ import type { DataAccessor } from './DataAccessor'; * DataAccessor that uses the file system to store documents as files and containers as folders. 
*/ export class FileDataAccessor implements DataAccessor { - private readonly resourceMapper: FileIdentifierMapper; + protected readonly resourceMapper: FileIdentifierMapper; public constructor(resourceMapper: FileIdentifierMapper) { this.resourceMapper = resourceMapper; @@ -149,7 +149,7 @@ export class FileDataAccessor implements DataAccessor { * @throws NotFoundHttpError * If the file/folder doesn't exist. */ - private async getStats(path: string): Promise { + protected async getStats(path: string): Promise { try { return await fsPromises.stat(path); } catch (error: unknown) { @@ -192,7 +192,7 @@ export class FileDataAccessor implements DataAccessor { * * @returns True if data was written to a file. */ - private async writeMetadata(link: ResourceLink, metadata: RepresentationMetadata): Promise { + protected async writeMetadata(link: ResourceLink, metadata: RepresentationMetadata): Promise { // These are stored by file system conventions metadata.remove(RDF.terms.type, LDP.terms.Resource); metadata.remove(RDF.terms.type, LDP.terms.Container); @@ -327,7 +327,7 @@ export class FileDataAccessor implements DataAccessor { * * @param link - ResourceLink corresponding to the new resource data. */ - private async verifyExistingExtension(link: ResourceLink): Promise { + protected async verifyExistingExtension(link: ResourceLink): Promise { try { // Delete the old file with the (now) wrong extension const oldLink = await this.resourceMapper.mapUrlToFilePath(link.identifier, false); @@ -347,11 +347,14 @@ export class FileDataAccessor implements DataAccessor { * @param path - The filepath of the file to be created. * @param data - The data to be put in the file. 
*/ - private async writeDataFile(path: string, data: Readable): Promise { + protected async writeDataFile(path: string, data: Readable): Promise { return new Promise((resolve, reject): any => { const writeStream = createWriteStream(path); data.pipe(writeStream); - data.on('error', reject); + data.on('error', (error): void => { + reject(error); + writeStream.end(); + }); writeStream.on('error', reject); writeStream.on('finish', resolve); diff --git a/src/storage/accessors/PassthroughDataAccessor.ts b/src/storage/accessors/PassthroughDataAccessor.ts new file mode 100644 index 000000000..1af6eb333 --- /dev/null +++ b/src/storage/accessors/PassthroughDataAccessor.ts @@ -0,0 +1,49 @@ +import type { Readable } from 'stream'; +import type { Representation } from '../../http/representation/Representation'; +import type { RepresentationMetadata } from '../../http/representation/RepresentationMetadata'; +import type { ResourceIdentifier } from '../../http/representation/ResourceIdentifier'; +import type { Guarded } from '../../util/GuardedStream'; +import type { AtomicDataAccessor } from './AtomicDataAccessor'; +import type { DataAccessor } from './DataAccessor'; + +/** + * DataAccessor that calls the corresponding functions of the source DataAccessor. + * Can be extended by data accessors that do not want to override all functions + * by implementing a decorator pattern. 
+ */ +export class PassthroughDataAccessor implements DataAccessor { + protected readonly accessor: AtomicDataAccessor; + + public constructor(accessor: DataAccessor) { + this.accessor = accessor; + } + + public async writeDocument(identifier: ResourceIdentifier, data: Guarded, metadata: RepresentationMetadata): + Promise { + return this.accessor.writeDocument(identifier, data, metadata); + } + + public async writeContainer(identifier: ResourceIdentifier, metadata: RepresentationMetadata): Promise { + return this.accessor.writeContainer(identifier, metadata); + } + + public async canHandle(representation: Representation): Promise { + return this.accessor.canHandle(representation); + } + + public async getData(identifier: ResourceIdentifier): Promise> { + return this.accessor.getData(identifier); + } + + public async getMetadata(identifier: ResourceIdentifier): Promise { + return this.accessor.getMetadata(identifier); + } + + public getChildren(identifier: ResourceIdentifier): AsyncIterableIterator { + return this.accessor.getChildren(identifier); + } + + public async deleteResource(identifier: ResourceIdentifier): Promise { + return this.accessor.deleteResource(identifier); + } +} diff --git a/src/storage/accessors/ValidatingDataAccessor.ts b/src/storage/accessors/ValidatingDataAccessor.ts new file mode 100644 index 000000000..394b4c7cb --- /dev/null +++ b/src/storage/accessors/ValidatingDataAccessor.ts @@ -0,0 +1,40 @@ +import type { Readable } from 'stream'; +import type { Validator } from '../../http/auxiliary/Validator'; +import { BasicRepresentation } from '../../http/representation/BasicRepresentation'; +import type { RepresentationMetadata } from '../../http/representation/RepresentationMetadata'; +import type { ResourceIdentifier } from '../../http/representation/ResourceIdentifier'; +import type { Guarded } from '../../util/GuardedStream'; +import type { DataAccessor } from './DataAccessor'; +import { PassthroughDataAccessor } from 
'./PassthroughDataAccessor'; + +/** + * A ValidatingDataAccessor wraps a DataAccessor such that the data stream is validated while being written. + * An AtomicDataAccessor can be used to prevent data being written in case validation fails. + */ +export class ValidatingDataAccessor extends PassthroughDataAccessor { + private readonly validator: Validator; + + public constructor(accessor: DataAccessor, validator: Validator) { + super(accessor); + this.validator = validator; + } + + public async writeDocument( + identifier: ResourceIdentifier, + data: Guarded, + metadata: RepresentationMetadata, + ): Promise { + const pipedRep = await this.validator.handleSafe({ + representation: new BasicRepresentation(data, metadata), + identifier, + }); + return this.accessor.writeDocument(identifier, pipedRep.data, metadata); + } + + public async writeContainer(identifier: ResourceIdentifier, metadata: RepresentationMetadata): Promise { + // A container's data mainly resides in its metadata, + // of which we can't calculate the disk size at this point in the code. + // Extra info can be found here: https://github.com/solid/community-server/pull/973#discussion_r723376888 + return this.accessor.writeContainer(identifier, metadata); + } +} diff --git a/src/storage/quota/GlobalQuotaStrategy.ts b/src/storage/quota/GlobalQuotaStrategy.ts new file mode 100644 index 000000000..0800cbb3d --- /dev/null +++ b/src/storage/quota/GlobalQuotaStrategy.ts @@ -0,0 +1,19 @@ +import type { Size } from '../size-reporter/Size'; +import type { SizeReporter } from '../size-reporter/SizeReporter'; +import { QuotaStrategy } from './QuotaStrategy'; + +/** + * The GlobalQuotaStrategy sets a limit on the amount of data stored on the server globally.
+ */ +export class GlobalQuotaStrategy extends QuotaStrategy { + private readonly base: string; + + public constructor(limit: Size, reporter: SizeReporter, base: string) { + super(reporter, limit); + this.base = base; + } + + protected async getTotalSpaceUsed(): Promise { + return this.reporter.getSize({ path: this.base }); + } +} diff --git a/src/storage/quota/PodQuotaStrategy.ts b/src/storage/quota/PodQuotaStrategy.ts new file mode 100644 index 000000000..803d59501 --- /dev/null +++ b/src/storage/quota/PodQuotaStrategy.ts @@ -0,0 +1,66 @@ +import type { RepresentationMetadata } from '../../http/representation/RepresentationMetadata'; +import type { ResourceIdentifier } from '../../http/representation/ResourceIdentifier'; +import { NotFoundHttpError } from '../../util/errors/NotFoundHttpError'; +import type { IdentifierStrategy } from '../../util/identifiers/IdentifierStrategy'; +import { RDF, PIM } from '../../util/Vocabularies'; +import type { DataAccessor } from '../accessors/DataAccessor'; +import type { Size } from '../size-reporter/Size'; +import type { SizeReporter } from '../size-reporter/SizeReporter'; +import { QuotaStrategy } from './QuotaStrategy'; + +/** + * The PodQuotaStrategy sets a limit on the amount of data stored on a per pod basis + */ +export class PodQuotaStrategy extends QuotaStrategy { + private readonly identifierStrategy: IdentifierStrategy; + private readonly accessor: DataAccessor; + + public constructor( + limit: Size, + reporter: SizeReporter, + identifierStrategy: IdentifierStrategy, + accessor: DataAccessor, + ) { + super(reporter, limit); + this.identifierStrategy = identifierStrategy; + this.accessor = accessor; + } + + protected async getTotalSpaceUsed(identifier: ResourceIdentifier): Promise { + const pimStorage = await this.searchPimStorage(identifier); + + // No storage was found containing this identifier, so we assume this identifier points to an internal location. 
+ // Quota does not apply here so there is always available space. + if (!pimStorage) { + return { amount: Number.MAX_SAFE_INTEGER, unit: this.limit.unit }; + } + + return this.reporter.getSize(pimStorage); + } + + /** Finds the closest parent container that has pim:storage as metadata */ + private async searchPimStorage(identifier: ResourceIdentifier): Promise { + if (this.identifierStrategy.isRootContainer(identifier)) { + return; + } + + let metadata: RepresentationMetadata; + const parent = this.identifierStrategy.getParentContainer(identifier); + + try { + metadata = await this.accessor.getMetadata(identifier); + } catch (error: unknown) { + if (error instanceof NotFoundHttpError) { + // Resource and/or its metadata do not exist + return this.searchPimStorage(parent); + } + throw error; + } + + const hasPimStorageMetadata = metadata!.getAll(RDF.type) + .some((term): boolean => term.value === PIM.Storage); + + return hasPimStorageMetadata ? identifier : this.searchPimStorage(parent); + } +} + diff --git a/src/storage/quota/QuotaStrategy.ts b/src/storage/quota/QuotaStrategy.ts new file mode 100644 index 000000000..2877cdd24 --- /dev/null +++ b/src/storage/quota/QuotaStrategy.ts @@ -0,0 +1,105 @@ +// These two eslint lines are needed to store 'this' in a variable so it can be used +// in the PassThrough of createQuotaGuard +/* eslint-disable @typescript-eslint/no-this-alias */ +/* eslint-disable consistent-this */ +import { PassThrough } from 'stream'; +import type { RepresentationMetadata } from '../../http/representation/RepresentationMetadata'; +import type { ResourceIdentifier } from '../../http/representation/ResourceIdentifier'; +import { PayloadHttpError } from '../../util/errors/PayloadHttpError'; +import type { Guarded } from '../../util/GuardedStream'; +import { guardStream } from '../../util/GuardedStream'; +import type { Size } from '../size-reporter/Size'; +import type { SizeReporter } from '../size-reporter/SizeReporter'; + +/** + * A QuotaStrategy 
is used when we want to set a limit to the amount of data that can be + * stored on the server. + * This can range from a limit for the whole server to a limit on a per pod basis. + * The way the size of a resource is calculated is implemented by the implementing classes. + * This can be bytes, quads, file count, ... + */ +export abstract class QuotaStrategy { + public readonly reporter: SizeReporter; + public readonly limit: Size; + + public constructor(reporter: SizeReporter, limit: Size) { + this.reporter = reporter; + this.limit = limit; + } + + /** + * Get the available space when writing data to the given identifier. + * If the given resource already exists it will deduct the already taken up + * space by that resource since it is going to be overwritten and thus counts + * as available space. + * + * @param identifier - the identifier of the resource of which you want the available space + * @returns the available space and the unit of the space as a Size object + */ + public async getAvailableSpace(identifier: ResourceIdentifier): Promise { + const totalUsed = await this.getTotalSpaceUsed(identifier); + + // Ignore identifiers where quota does not apply + if (totalUsed.amount === Number.MAX_SAFE_INTEGER) { + return totalUsed; + } + + // When a file is overwritten the space the file takes up right now should also + // be counted as available space as it will disappear/be overwritten + totalUsed.amount -= (await this.reporter.getSize(identifier)).amount; + + return { + amount: this.limit.amount - totalUsed.amount, + unit: this.limit.unit, + }; + } + + /** + * Get the currently used/occupied space. + * + * @param identifier - the identifier that should be used to calculate the total + * @returns a Size object containing the requested value. 
+ * If quota is not relevant for this identifier, Size.amount should be Number.MAX_SAFE_INTEGER + */ + protected abstract getTotalSpaceUsed(identifier: ResourceIdentifier): Promise; + + /** + * Get an estimated size of the resource + * + * @param metadata - the metadata that might include the size + * @returns a Size object containing the estimated size and unit of the resource + */ + public async estimateSize(metadata: RepresentationMetadata): Promise { + const estimate = await this.reporter.estimateSize(metadata); + return estimate ? { unit: this.limit.unit, amount: estimate } : undefined; + } + + /** + * Get a Passthrough stream that will keep track of the available space. + * If the quota is exceeded the stream will emit an error and destroy itself. + * Like other Passthrough instances this will simply pass on the chunks, when the quota isn't exceeded. + * + * @param identifier - the identifier of the resource in question + * @returns a Passthrough instance that errors when quota is exceeded + */ + public async createQuotaGuard(identifier: ResourceIdentifier): Promise> { + let total = 0; + const strategy = this; + const { reporter } = this; + + return guardStream(new PassThrough({ + async transform(this, chunk: any, enc: string, done: () => void): Promise { + total += await reporter.calculateChunkSize(chunk); + const availableSpace = await strategy.getAvailableSpace(identifier); + if (availableSpace.amount < total) { + this.destroy(new PayloadHttpError( + `Quota exceeded by ${total - availableSpace.amount} ${availableSpace.unit} during write`, + )); + } + + this.push(chunk); + done(); + }, + })); + } +} diff --git a/src/storage/size-reporter/FileSizeReporter.ts b/src/storage/size-reporter/FileSizeReporter.ts new file mode 100644 index 000000000..153168677 --- /dev/null +++ b/src/storage/size-reporter/FileSizeReporter.ts @@ -0,0 +1,87 @@ +import type { Stats } from 'fs'; +import { promises as fsPromises } from 'fs'; +import type { RepresentationMetadata } from 
'../../http/representation/RepresentationMetadata'; +import type { ResourceIdentifier } from '../../http/representation/ResourceIdentifier'; +import { joinFilePath, normalizeFilePath, trimTrailingSlashes } from '../../util/PathUtil'; +import type { FileIdentifierMapper } from '../mapping/FileIdentifierMapper'; +import type { Size } from './Size'; +import { UNIT_BYTES } from './Size'; +import type { SizeReporter } from './SizeReporter'; + +/** + * SizeReporter that is used to calculate sizes of resources for a file based system. + */ +export class FileSizeReporter implements SizeReporter { + private readonly fileIdentifierMapper: FileIdentifierMapper; + private readonly ignoreFolders: RegExp[]; + private readonly rootFilePath: string; + + public constructor(fileIdentifierMapper: FileIdentifierMapper, rootFilePath: string, ignoreFolders?: string[]) { + this.fileIdentifierMapper = fileIdentifierMapper; + this.ignoreFolders = ignoreFolders ? ignoreFolders.map((folder: string): RegExp => new RegExp(folder, 'u')) : []; + this.rootFilePath = normalizeFilePath(rootFilePath); + } + + /** The FileSizeReporter will always return data in the form of bytes */ + public getUnit(): string { + return UNIT_BYTES; + } + + /** + * Returns the size of the given resource ( and its children ) in bytes + */ + public async getSize(identifier: ResourceIdentifier): Promise { + const fileLocation = (await this.fileIdentifierMapper.mapUrlToFilePath(identifier, false)).filePath; + + return { unit: this.getUnit(), amount: await this.getTotalSize(fileLocation) }; + } + + public async calculateChunkSize(chunk: string): Promise { + return chunk.length; + } + + /** The estimated size of a resource in this reporter is simply the content-length header */ + public async estimateSize(metadata: RepresentationMetadata): Promise { + return metadata.contentLength; + } + + /** + * Get the total size of a resource and its children if present + * + * @param fileLocation - the resource of which you want the 
total size of ( on disk ) + * @returns a number specifying how many bytes are used by the resource + */ + private async getTotalSize(fileLocation: string): Promise { + let stat: Stats; + + // Check if the file exists + try { + stat = await fsPromises.stat(fileLocation); + } catch { + return 0; + } + + // If the file's location points to a file, simply return the file's size + if (stat.isFile()) { + return stat.size; + } + + // If the location DOES exist and is NOT a file it should be a directory + // recursively add all sizes of children to the total + const childFiles = await fsPromises.readdir(fileLocation); + const rootFilePathLength = trimTrailingSlashes(this.rootFilePath).length; + + return await childFiles.reduce(async(acc: Promise, current): Promise => { + const childFileLocation = normalizeFilePath(joinFilePath(fileLocation, current)); + let result = await acc; + + // Exclude internal files + if (!this.ignoreFolders.some((folder: RegExp): boolean => + folder.test(childFileLocation.slice(rootFilePathLength)))) { + result += await this.getTotalSize(childFileLocation); + } + + return result; + }, Promise.resolve(stat.size)); + } +} diff --git a/src/storage/size-reporter/Size.ts b/src/storage/size-reporter/Size.ts new file mode 100644 index 000000000..26987179d --- /dev/null +++ b/src/storage/size-reporter/Size.ts @@ -0,0 +1,9 @@ +/** + * Describes the size of something by stating how much of a certain unit is present. 
+ */ +export interface Size { + unit: string; + amount: number; +} + +export const UNIT_BYTES = 'bytes'; diff --git a/src/storage/size-reporter/SizeReporter.ts b/src/storage/size-reporter/SizeReporter.ts new file mode 100644 index 000000000..30ec5d59b --- /dev/null +++ b/src/storage/size-reporter/SizeReporter.ts @@ -0,0 +1,44 @@ +import type { RepresentationMetadata } from '../../http/representation/RepresentationMetadata'; +import type { ResourceIdentifier } from '../../http/representation/ResourceIdentifier'; +import type { Size } from './Size'; + +/** + * A SizeReporter's only purpose (at the moment) is to calculate the size + * of a resource. How the size is calculated or what unit it is in is defined by + * the class implementing this interface. + * One might use the amount of bytes and another might use the amount of triples + * stored in a resource. + */ +export interface SizeReporter { + + /** + * Get the unit as a string in which a SizeReporter returns data + */ + getUnit: () => string; + + /** + * Get the size of a given resource + * + * @param identifier - the resource of which you want the size + * @returns The size of the resource as a Size object calculated recursively + * if the identifier leads to a container + */ + getSize: (identifier: ResourceIdentifier) => Promise; + + /** + * Calculate the size of a chunk based on which SizeReporter is being used + * + * @param chunk - the chunk of which you want the size + * @returns the size of the passed chunk as a number + */ + calculateChunkSize: (chunk: T) => Promise; + + /** + * Estimate the size of a body / request by looking at its metadata + * + * @param metadata - the metadata of the resource you want an estimated size of + * @returns the estimated size of the body / request or undefined if no + * meaningful estimation can be made + */ + estimateSize: (metadata: RepresentationMetadata) => Promise; +} diff --git a/src/storage/validators/QuotaValidator.ts b/src/storage/validators/QuotaValidator.ts new 
file mode 100644 index 000000000..f0993a0ed --- /dev/null +++ b/src/storage/validators/QuotaValidator.ts @@ -0,0 +1,61 @@ +import { Readable, PassThrough } from 'stream'; +import { Validator } from '../../http/auxiliary/Validator'; +import type { ValidatorInput } from '../../http/auxiliary/Validator'; +import type { Representation } from '../../http/representation/Representation'; +import { PayloadHttpError } from '../../util/errors/PayloadHttpError'; +import type { Guarded } from '../../util/GuardedStream'; +import { guardStream } from '../../util/GuardedStream'; +import { pipeSafely } from '../../util/StreamUtil'; +import type { QuotaStrategy } from '../quota/QuotaStrategy'; + +/** + * The QuotaValidator validates data streams by making sure they would not exceed the limits of a QuotaStrategy. + */ +export class QuotaValidator extends Validator { + private readonly strategy: QuotaStrategy; + + public constructor(strategy: QuotaStrategy) { + super(); + this.strategy = strategy; + } + + public async handle({ representation, identifier }: ValidatorInput): Promise { + const { data, metadata } = representation; + + // 1. Get the available size + const availableSize = await this.strategy.getAvailableSpace(identifier); + + // 2. Check if the estimated size is bigger than the available size + const estimatedSize = await this.strategy.estimateSize(metadata); + + if (estimatedSize && availableSize.amount < estimatedSize.amount) { + return { + ...representation, + data: guardStream(new Readable({ + read(this): void { + this.destroy(new PayloadHttpError( + `Quota exceeded: Advertised Content-Length is ${estimatedSize.amount} ${estimatedSize.unit} ` + + `and only ${availableSize.amount} ${availableSize.unit} is available`, + )); + }, + })), + }; + } + + // 3. Track if quota is exceeded during writing + const tracking: Guarded = await this.strategy.createQuotaGuard(identifier); + + // 4. 
Double check quota is not exceeded after write (concurrent writing possible) + const afterWrite = new PassThrough({ + flush: async(done): Promise => { + const availableSpace = (await this.strategy.getAvailableSpace(identifier)).amount; + done(availableSpace < 0 ? new PayloadHttpError('Quota exceeded after write completed') : undefined); + }, + }); + + return { + ...representation, + data: pipeSafely(pipeSafely(data, tracking), afterWrite), + }; + } +} diff --git a/src/util/Vocabularies.ts b/src/util/Vocabularies.ts index 060820e22..32384d9b7 100644 --- a/src/util/Vocabularies.ts +++ b/src/util/Vocabularies.ts @@ -86,6 +86,10 @@ export const FOAF = createUriAndTermNamespace('http://xmlns.com/foaf/0.1/', 'Agent', ); +export const HH = createUriAndTermNamespace('http://www.w3.org/2011/http-headers#', + 'content-length', +); + export const HTTP = createUriAndTermNamespace('http://www.w3.org/2011/http#', 'statusCodeNumber', ); @@ -155,6 +159,7 @@ export const XSD = createUriAndTermNamespace('http://www.w3.org/2001/XMLSchema#' ); // Alias for commonly used types +export const CONTENT_LENGTH_TERM = HH.terms['content-length']; export const CONTENT_TYPE = MA.format; export const CONTENT_TYPE_TERM = MA.terms.format; export const PREFERRED_PREFIX = VANN.preferredNamespacePrefix; diff --git a/src/util/errors/PayloadHttpError.ts b/src/util/errors/PayloadHttpError.ts new file mode 100644 index 000000000..b8fad8b5f --- /dev/null +++ b/src/util/errors/PayloadHttpError.ts @@ -0,0 +1,23 @@ +import type { HttpErrorOptions } from './HttpError'; +import { HttpError } from './HttpError'; + +/** + * An error thrown when data exceeded the preconfigured quota + */ +export class PayloadHttpError extends HttpError { + /** + * Default message is 'Storage quota was exceeded.'. + * @param message - Optional, more specific, message. + * @param options - Optional error options. 
+ */ + public constructor(message?: string, options?: HttpErrorOptions) { + super(413, + 'PayloadHttpError', + message ?? 'Storage quota was exceeded.', + options); + } + + public static isInstance(error: any): error is PayloadHttpError { + return HttpError.isInstance(error) && error.statusCode === 413; + } +} diff --git a/test/integration/Quota.test.ts b/test/integration/Quota.test.ts new file mode 100644 index 000000000..941d72969 --- /dev/null +++ b/test/integration/Quota.test.ts @@ -0,0 +1,222 @@ +import { promises as fsPromises } from 'fs'; +import type { Stats } from 'fs'; +import fetch from 'cross-fetch'; +import type { Response } from 'cross-fetch'; +import { joinFilePath, joinUrl } from '../../src'; +import type { App } from '../../src'; +import { getPort } from '../util/Util'; +import { getDefaultVariables, getTestConfigPath, getTestFolder, instantiateFromConfig, removeFolder } from './Config'; + +/** Performs a simple PUT request to the given 'path' with a body containing 'length' amount of characters */ +async function performSimplePutWithLength(path: string, length: number): Promise { + return fetch( + path, + { + method: 'PUT', + headers: { + 'content-type': 'text/plain', + }, + body: 'A'.repeat(length), + }, + ); +} + +/** Registers two test pods on the server matching the 'baseUrl' */ +async function registerTestPods(baseUrl: string, pods: string[]): Promise { + for (const pod of pods) { + await fetch(`${baseUrl}idp/register/`, { + method: 'POST', + headers: { + 'content-type': 'application/json', + }, + body: JSON.stringify({ + createWebId: 'on', + webId: '', + register: 'on', + createPod: 'on', + podName: pod, + email: `${pod}@example.ai`, + password: 't', + confirmPassword: 't', + submit: '', + }), + }); + } +} + +/* We just want a container with the correct metadata, everything else can be removed */ +async function clearInitialFiles(rootFilePath: string, pods: string[]): Promise { + for (const pod of pods) { + const fileList = await 
fsPromises.readdir(joinFilePath(rootFilePath, pod)); + for (const file of fileList) { + if (file !== '.meta') { + const path = joinFilePath(rootFilePath, pod, file); + if ((await fsPromises.stat(path)).isDirectory()) { + await fsPromises.rmdir(path, { recursive: true }); + } else { + await fsPromises.unlink(path); + } + } + } + } +} + +describe('A quota server', (): void => { + // The allowed quota depends on what filesystem/OS you are using. + // For example: an empty folder is reported as + // - 0KB on NTFS (most of the time, mileage may vary) + // - 0-...KB on APFS (depending on its contents and settings) + // - 4096KB on FAT + // This is why we need to determine the size of a folder on the current system. + let folderSizeTest: Stats; + beforeAll(async(): Promise => { + // We want to use an empty folder as on APFS/Mac folder sizes vary a lot + const tempFolder = getTestFolder('quota-temp'); + await fsPromises.mkdir(tempFolder); + folderSizeTest = await fsPromises.stat(tempFolder); + await removeFolder(tempFolder); + }); + const podName1 = 'arthur'; + const podName2 = 'abel'; + + /** Test the general functionality of the server using pod quota */ + describe('with pod quota enabled', (): void => { + const port = getPort('PodQuota'); + const baseUrl = `http://localhost:${port}/`; + const pod1 = joinUrl(baseUrl, podName1); + const pod2 = joinUrl(baseUrl, podName2); + const rootFilePath = getTestFolder('quota-pod'); + + let app: App; + + beforeAll(async(): Promise => { + // Calculate the allowed quota depending on file system used + const size = folderSizeTest.size + 4000; + + const instances = await instantiateFromConfig( + 'urn:solid-server:test:Instances', + getTestConfigPath('quota-pod.json'), + { + ...getDefaultVariables(port, baseUrl), + 'urn:solid-server:default:variable:rootFilePath': rootFilePath, + 'urn:solid-server:default:variable:PodQuota': size, + }, + ) as Record; + ({ app } = instances); + await app.start(); + + // Initialize 2 pods + await 
registerTestPods(baseUrl, [ podName1, podName2 ]); + await clearInitialFiles(rootFilePath, [ podName1, podName2 ]); + }); + + afterAll(async(): Promise => { + await app.stop(); + await removeFolder(rootFilePath); + }); + + // Test quota in the first pod + it('should return a 413 when the quota is exceeded during write.', async(): Promise => { + const testFile1 = `${pod1}/test1.txt`; + const testFile2 = `${pod1}/test2.txt`; + + const response1 = performSimplePutWithLength(testFile1, 2000); + await expect(response1).resolves.toBeDefined(); + expect((await response1).status).toEqual(201); + + const response2 = performSimplePutWithLength(testFile2, 2500); + await expect(response2).resolves.toBeDefined(); + expect((await response2).status).toEqual(413); + }); + + // Test if writing in another pod is still possible + it('should allow writing in a pod that is not full yet.', async(): Promise => { + const testFile1 = `${pod2}/test1.txt`; + + const response1 = performSimplePutWithLength(testFile1, 2000); + await expect(response1).resolves.toBeDefined(); + expect((await response1).status).toEqual(201); + }); + + // Both pods should not accept this request anymore + it('should block PUT requests to different pods if their quota is exceeded.', async(): Promise => { + const testFile1 = `${pod1}/test2.txt`; + const testFile2 = `${pod2}/test2.txt`; + + const response1 = performSimplePutWithLength(testFile1, 2500); + await expect(response1).resolves.toBeDefined(); + expect((await response1).status).toEqual(413); + + const response2 = performSimplePutWithLength(testFile2, 2500); + await expect(response2).resolves.toBeDefined(); + expect((await response2).status).toEqual(413); + }); + }); + + /** Test the general functionality of the server using global quota */ + describe('with global quota enabled', (): void => { + const port = getPort('GlobalQuota'); + const baseUrl = `http://localhost:${port}/`; + const pod1 = `${baseUrl}${podName1}`; + const pod2 = `${baseUrl}${podName2}`; + 
const rootFilePath = getTestFolder('quota-global'); + + let app: App; + + beforeAll(async(): Promise => { + // Calculate the allowed quota depending on file system used + const size = (folderSizeTest.size * 3) + 4000; + + const instances = await instantiateFromConfig( + 'urn:solid-server:test:Instances', + getTestConfigPath('quota-global.json'), + { + ...getDefaultVariables(port, baseUrl), + 'urn:solid-server:default:variable:rootFilePath': rootFilePath, + 'urn:solid-server:default:variable:GlobalQuota': size, + }, + ) as Record; + ({ app } = instances); + await app.start(); + + // Initialize 2 pods + await registerTestPods(baseUrl, [ podName1, podName2 ]); + await clearInitialFiles(rootFilePath, [ podName1, podName2 ]); + }); + + afterAll(async(): Promise => { + await app.stop(); + await removeFolder(rootFilePath); + }); + + it('should return 413 when global quota is exceeded.', async(): Promise => { + const testFile1 = `${baseUrl}test1.txt`; + const testFile2 = `${baseUrl}test2.txt`; + + const response1 = performSimplePutWithLength(testFile1, 2000); + await expect(response1).resolves.toBeDefined(); + const awaitedRes1 = await response1; + expect(awaitedRes1.status).toEqual(201); + + const response2 = performSimplePutWithLength(testFile2, 2500); + await expect(response2).resolves.toBeDefined(); + const awaitedRes2 = await response2; + expect(awaitedRes2.status).toEqual(413); + }); + + it('should return 413 when trying to write to any pod when global quota is exceeded.', async(): Promise => { + const testFile1 = `${pod1}/test3.txt`; + const testFile2 = `${pod2}/test4.txt`; + + const response1 = performSimplePutWithLength(testFile1, 2500); + await expect(response1).resolves.toBeDefined(); + const awaitedRes1 = await response1; + expect(awaitedRes1.status).toEqual(413); + + const response2 = performSimplePutWithLength(testFile2, 2500); + await expect(response2).resolves.toBeDefined(); + const awaitedRes2 = await response2; + expect(awaitedRes2.status).toEqual(413); + 
}); + }); +}); diff --git a/test/integration/config/quota-global.json b/test/integration/config/quota-global.json new file mode 100644 index 000000000..b23a80f38 --- /dev/null +++ b/test/integration/config/quota-global.json @@ -0,0 +1,65 @@ +{ + "@context": "https://linkedsoftwaredependencies.org/bundles/npm/@solid/community-server/^2.0.0/components/context.jsonld", + "import": [ + "files-scs:config/app/main/default.json", + "files-scs:config/app/init/initialize-root.json", + "files-scs:config/app/setup/disabled.json", + "files-scs:config/http/handler/default.json", + "files-scs:config/http/middleware/websockets.json", + "files-scs:config/http/server-factory/websockets.json", + "files-scs:config/http/static/default.json", + "files-scs:config/identity/access/public.json", + "files-scs:config/identity/email/default.json", + "files-scs:config/identity/handler/default.json", + "files-scs:config/identity/ownership/token.json", + "files-scs:config/identity/pod/static.json", + "files-scs:config/identity/registration/enabled.json", + "files-scs:config/ldp/authentication/dpop-bearer.json", + "files-scs:config/ldp/authorization/allow-all.json", + "files-scs:config/ldp/handler/default.json", + "files-scs:config/ldp/metadata-parser/default.json", + "files-scs:config/ldp/metadata-writer/default.json", + "files-scs:config/ldp/modes/default.json", + "files-scs:config/storage/backend/global-quota-file.json", + "files-scs:config/storage/key-value/resource-store.json", + "files-scs:config/storage/middleware/default.json", + "files-scs:config/util/auxiliary/acl.json", + "files-scs:config/util/identifiers/suffix.json", + "files-scs:config/util/index/default.json", + "files-scs:config/util/logging/winston.json", + "files-scs:config/util/representation-conversion/default.json", + "files-scs:config/util/resource-locker/memory.json", + "files-scs:config/util/variables/default.json" + ], + "@graph": [ + { + "comment": "A single-pod server that stores its resources on disk while enforcing 
quota." + }, + { + "comment": "The set quota enforced globally", + "@id": "urn:solid-server:default:variable:GlobalQuota", + "@type": "Variable" + }, + { + "@id": "urn:solid-server:default:QuotaStrategy", + "GlobalQuotaStrategy:_limit_amount": { + "@id": "urn:solid-server:default:variable:GlobalQuota" + }, + "GlobalQuotaStrategy:_limit_unit": "bytes" + }, + { + "@id": "urn:solid-server:default:SizeReporter", + "FileSizeReporter:_ignoreFolders": [ "^/\\.internal$" ] + }, + { + "@id": "urn:solid-server:test:Instances", + "@type": "RecordObject", + "record": [ + { + "RecordObject:_record_key": "app", + "RecordObject:_record_value": { "@id": "urn:solid-server:default:App" } + } + ] + } + ] +} diff --git a/test/integration/config/quota-pod.json b/test/integration/config/quota-pod.json new file mode 100644 index 000000000..d2497746f --- /dev/null +++ b/test/integration/config/quota-pod.json @@ -0,0 +1,61 @@ +{ + "@context": "https://linkedsoftwaredependencies.org/bundles/npm/@solid/community-server/^2.0.0/components/context.jsonld", + "import": [ + "files-scs:config/app/main/default.json", + "files-scs:config/app/init/initialize-root.json", + "files-scs:config/app/setup/disabled.json", + "files-scs:config/http/handler/default.json", + "files-scs:config/http/middleware/websockets.json", + "files-scs:config/http/server-factory/websockets.json", + "files-scs:config/http/static/default.json", + "files-scs:config/identity/access/public.json", + "files-scs:config/identity/email/default.json", + "files-scs:config/identity/handler/default.json", + "files-scs:config/identity/ownership/token.json", + "files-scs:config/identity/pod/static.json", + "files-scs:config/identity/registration/enabled.json", + "files-scs:config/ldp/authentication/dpop-bearer.json", + "files-scs:config/ldp/authorization/allow-all.json", + "files-scs:config/ldp/handler/default.json", + "files-scs:config/ldp/metadata-parser/default.json", + "files-scs:config/ldp/metadata-writer/default.json", + 
"files-scs:config/ldp/modes/default.json", + "files-scs:config/storage/backend/pod-quota-file.json", + "files-scs:config/storage/key-value/resource-store.json", + "files-scs:config/storage/middleware/default.json", + "files-scs:config/util/auxiliary/acl.json", + "files-scs:config/util/identifiers/suffix.json", + "files-scs:config/util/index/default.json", + "files-scs:config/util/logging/winston.json", + "files-scs:config/util/representation-conversion/default.json", + "files-scs:config/util/resource-locker/memory.json", + "files-scs:config/util/variables/default.json" + ], + "@graph": [ + { + "comment": "A single-pod server that stores its resources on disk while enforcing quota." + }, + { + "comment": "The set quota enforced per pod", + "@id": "urn:solid-server:default:variable:PodQuota", + "@type": "Variable" + }, + { + "@id": "urn:solid-server:default:QuotaStrategy", + "PodQuotaStrategy:_limit_amount": { + "@id": "urn:solid-server:default:variable:PodQuota" + }, + "PodQuotaStrategy:_limit_unit": "bytes" + }, + { + "@id": "urn:solid-server:test:Instances", + "@type": "RecordObject", + "record": [ + { + "RecordObject:_record_key": "app", + "RecordObject:_record_value": { "@id": "urn:solid-server:default:App" } + } + ] + } + ] +} diff --git a/test/unit/http/auxiliary/ComposedAuxiliaryStrategy.test.ts b/test/unit/http/auxiliary/ComposedAuxiliaryStrategy.test.ts index 4e07aca1c..cdc961c8e 100644 --- a/test/unit/http/auxiliary/ComposedAuxiliaryStrategy.test.ts +++ b/test/unit/http/auxiliary/ComposedAuxiliaryStrategy.test.ts @@ -61,10 +61,10 @@ describe('A ComposedAuxiliaryStrategy', (): void => { }); it('validates data through the Validator.', async(): Promise => { - const representation = { data: 'data!' 
} as any; + const representation = { data: 'data!', metadata: { identifier: { value: 'any' }}} as any; await expect(strategy.validate(representation)).resolves.toBeUndefined(); expect(validator.handleSafe).toHaveBeenCalledTimes(1); - expect(validator.handleSafe).toHaveBeenLastCalledWith(representation); + expect(validator.handleSafe).toHaveBeenLastCalledWith({ representation, identifier: { path: 'any' }}); }); it('defaults isRequiredInRoot to false.', async(): Promise => { diff --git a/test/unit/http/auxiliary/RdfValidator.test.ts b/test/unit/http/auxiliary/RdfValidator.test.ts index 71b5b48a0..16e3fe8f4 100644 --- a/test/unit/http/auxiliary/RdfValidator.test.ts +++ b/test/unit/http/auxiliary/RdfValidator.test.ts @@ -1,5 +1,6 @@ import { RdfValidator } from '../../../../src/http/auxiliary/RdfValidator'; import { BasicRepresentation } from '../../../../src/http/representation/BasicRepresentation'; +import type { ResourceIdentifier } from '../../../../src/http/representation/ResourceIdentifier'; import type { RepresentationConverter } from '../../../../src/storage/conversion/RepresentationConverter'; import { readableToString } from '../../../../src/util/StreamUtil'; import { StaticAsyncHandler } from '../../../util/StaticAsyncHandler'; @@ -8,6 +9,7 @@ import 'jest-rdf'; describe('An RdfValidator', (): void => { let converter: RepresentationConverter; let validator: RdfValidator; + const identifier: ResourceIdentifier = { path: 'any/path' }; beforeEach(async(): Promise => { converter = new StaticAsyncHandler(true, null); @@ -20,14 +22,15 @@ describe('An RdfValidator', (): void => { it('always accepts content-type internal/quads.', async(): Promise => { const representation = new BasicRepresentation('data', 'internal/quads'); - await expect(validator.handle(representation)).resolves.toBeUndefined(); + await expect(validator.handle({ representation, identifier })).resolves.toEqual(representation); }); it('validates data by running it through a converter.', async(): 
Promise => { converter.handleSafe = jest.fn().mockResolvedValue(new BasicRepresentation('transformedData', 'wrongType')); const representation = new BasicRepresentation('data', 'content-type'); const quads = representation.metadata.quads(); - await expect(validator.handle(representation)).resolves.toBeUndefined(); + // Output is not important for this Validator + await expect(validator.handle({ representation, identifier })).resolves.toBeDefined(); // Make sure the data can still be streamed await expect(readableToString(representation.data)).resolves.toBe('data'); // Make sure the metadata was not changed @@ -37,7 +40,7 @@ describe('An RdfValidator', (): void => { it('throws an error when validating invalid data.', async(): Promise => { converter.handleSafe = jest.fn().mockRejectedValue(new Error('bad data!')); const representation = new BasicRepresentation('data', 'content-type'); - await expect(validator.handle(representation)).rejects.toThrow('bad data!'); + await expect(validator.handle({ representation, identifier })).rejects.toThrow('bad data!'); // Make sure the data on the readable has not been reset expect(representation.data.destroyed).toBe(true); }); diff --git a/test/unit/http/input/metadata/ContentLengthParser.test.ts b/test/unit/http/input/metadata/ContentLengthParser.test.ts new file mode 100644 index 000000000..1805ed9eb --- /dev/null +++ b/test/unit/http/input/metadata/ContentLengthParser.test.ts @@ -0,0 +1,32 @@ +import { ContentLengthParser } from '../../../../../src/http/input/metadata/ContentLengthParser'; +import { RepresentationMetadata } from '../../../../../src/http/representation/RepresentationMetadata'; +import type { HttpRequest } from '../../../../../src/server/HttpRequest'; + +describe('A ContentLengthParser', (): void => { + const parser = new ContentLengthParser(); + let request: HttpRequest; + let metadata: RepresentationMetadata; + + beforeEach(async(): Promise => { + request = { headers: {}} as HttpRequest; + metadata = new 
RepresentationMetadata(); + }); + + it('does nothing if there is no content-length header.', async(): Promise => { + await expect(parser.handle({ request, metadata })).resolves.toBeUndefined(); + expect(metadata.quads()).toHaveLength(0); + }); + + it('sets the given content-length as metadata.', async(): Promise => { + request.headers['content-length'] = '50'; + await expect(parser.handle({ request, metadata })).resolves.toBeUndefined(); + expect(metadata.quads()).toHaveLength(1); + expect(metadata.contentLength).toBe(50); + }); + + it('does not set a content-length when the header is invalid.', async(): Promise => { + request.headers['content-length'] = 'aabbcc50ccbbaa'; + await expect(parser.handle({ request, metadata })).resolves.toBeUndefined(); + expect(metadata.quads()).toHaveLength(0); + }); +}); diff --git a/test/unit/http/representation/RepresentationMetadata.test.ts b/test/unit/http/representation/RepresentationMetadata.test.ts index 9ecd27603..b56017752 100644 --- a/test/unit/http/representation/RepresentationMetadata.test.ts +++ b/test/unit/http/representation/RepresentationMetadata.test.ts @@ -60,6 +60,16 @@ describe('A RepresentationMetadata', (): void => { expect(metadata.contentType).toEqual('text/turtle'); }); + it('stores the content-length correctly.', async(): Promise => { + metadata = new RepresentationMetadata(); + metadata.contentLength = 50; + expect(metadata.contentLength).toEqual(50); + + metadata = new RepresentationMetadata(); + metadata.contentLength = undefined; + expect(metadata.contentLength).toBeUndefined(); + }); + it('copies an other metadata object.', async(): Promise => { const other = new RepresentationMetadata({ path: 'otherId' }, { 'test:pred': 'objVal' }); metadata = new RepresentationMetadata(other); diff --git a/test/unit/quota/GlobalQuotaStrategy.test.ts b/test/unit/quota/GlobalQuotaStrategy.test.ts new file mode 100644 index 000000000..650d11e97 --- /dev/null +++ b/test/unit/quota/GlobalQuotaStrategy.test.ts @@ -0,0 
+1,37 @@ +import type { ResourceIdentifier } from '../../../src/http/representation/ResourceIdentifier'; +import { GlobalQuotaStrategy } from '../../../src/storage/quota/GlobalQuotaStrategy'; +import { UNIT_BYTES } from '../../../src/storage/size-reporter/Size'; +import type { Size } from '../../../src/storage/size-reporter/Size'; +import type { SizeReporter } from '../../../src/storage/size-reporter/SizeReporter'; + +describe('GlobalQuotaStrategy', (): void => { + let strategy: GlobalQuotaStrategy; + let mockSize: Size; + let mockReporter: jest.Mocked>; + let mockBase: string; + + beforeEach((): void => { + mockSize = { amount: 2000, unit: UNIT_BYTES }; + mockBase = ''; + mockReporter = { + getSize: jest.fn(async(identifier: ResourceIdentifier): Promise => ({ + unit: mockSize.unit, + // This mock will return 1000 as size of the root and 50 for any other resource + amount: identifier.path === mockBase ? 1000 : 50, + })), + getUnit: jest.fn().mockReturnValue(mockSize.unit), + calculateChunkSize: jest.fn(async(chunk: any): Promise => chunk.length), + estimateSize: jest.fn().mockResolvedValue(5), + }; + strategy = new GlobalQuotaStrategy(mockSize, mockReporter, mockBase); + }); + + describe('getAvailableSpace()', (): void => { + it('should return the correct amount of available space left.', async(): Promise => { + const result = strategy.getAvailableSpace({ path: 'any/path' }); + await expect(result).resolves.toEqual( + expect.objectContaining({ amount: mockSize.amount - 950 }), + ); + }); + }); +}); diff --git a/test/unit/quota/PodQuotaStrategy.test.ts b/test/unit/quota/PodQuotaStrategy.test.ts new file mode 100644 index 000000000..64ef455b2 --- /dev/null +++ b/test/unit/quota/PodQuotaStrategy.test.ts @@ -0,0 +1,77 @@ +import { RepresentationMetadata } from '../../../src/http/representation/RepresentationMetadata'; +import type { ResourceIdentifier } from '../../../src/http/representation/ResourceIdentifier'; +import type { DataAccessor } from 
'../../../src/storage/accessors/DataAccessor'; +import { PodQuotaStrategy } from '../../../src/storage/quota/PodQuotaStrategy'; +import { UNIT_BYTES } from '../../../src/storage/size-reporter/Size'; +import type { Size } from '../../../src/storage/size-reporter/Size'; +import type { SizeReporter } from '../../../src/storage/size-reporter/SizeReporter'; +import { NotFoundHttpError } from '../../../src/util/errors/NotFoundHttpError'; +import type { IdentifierStrategy } from '../../../src/util/identifiers/IdentifierStrategy'; +import { SingleRootIdentifierStrategy } from '../../../src/util/identifiers/SingleRootIdentifierStrategy'; +import { PIM, RDF } from '../../../src/util/Vocabularies'; +import { mockFs } from '../../util/Util'; + +jest.mock('fs'); + +describe('PodQuotaStrategy', (): void => { + let strategy: PodQuotaStrategy; + let mockSize: Size; + let mockReporter: jest.Mocked>; + let identifierStrategy: IdentifierStrategy; + let accessor: jest.Mocked; + const base = 'http://localhost:3000/'; + const rootFilePath = 'folder'; + + beforeEach((): void => { + jest.restoreAllMocks(); + mockFs(rootFilePath, new Date()); + mockSize = { amount: 2000, unit: UNIT_BYTES }; + identifierStrategy = new SingleRootIdentifierStrategy(base); + mockReporter = { + getSize: jest.fn().mockResolvedValue({ unit: mockSize.unit, amount: 50 }), + getUnit: jest.fn().mockReturnValue(mockSize.unit), + calculateChunkSize: jest.fn(async(chunk: any): Promise => chunk.length), + estimateSize: jest.fn().mockResolvedValue(5), + }; + accessor = { + // Assume that the pod is called "nested" + getMetadata: jest.fn().mockImplementation( + async(identifier: ResourceIdentifier): Promise => { + const res = new RepresentationMetadata(); + if (identifier.path === `${base}nested/`) { + res.add(RDF.type, PIM.Storage); + } + return res; + }, + ), + } as any; + strategy = new PodQuotaStrategy(mockSize, mockReporter, identifierStrategy, accessor); + }); + + describe('getAvailableSpace()', (): void => { + 
it('should return a Size containing MAX_SAFE_INTEGER when writing outside a pod.', async(): Promise => { + const result = strategy.getAvailableSpace({ path: `${base}file.txt` }); + await expect(result).resolves.toEqual(expect.objectContaining({ amount: Number.MAX_SAFE_INTEGER })); + }); + it('should ignore the size of the existing resource when writing inside a pod.', async(): Promise => { + const result = strategy.getAvailableSpace({ path: `${base}nested/nested2/file.txt` }); + await expect(result).resolves.toEqual(expect.objectContaining({ amount: mockSize.amount })); + expect(mockReporter.getSize).toHaveBeenCalledTimes(2); + }); + it('should return a Size containing the available space when writing inside a pod.', async(): Promise => { + accessor.getMetadata.mockImplementationOnce((): any => { + throw new NotFoundHttpError(); + }); + const result = strategy.getAvailableSpace({ path: `${base}nested/nested2/file.txt` }); + await expect(result).resolves.toEqual(expect.objectContaining({ amount: mockSize.amount })); + expect(mockReporter.getSize).toHaveBeenCalledTimes(2); + }); + it('should throw when looking for pim:Storage errors.', async(): Promise => { + accessor.getMetadata.mockImplementationOnce((): any => { + throw new Error('error'); + }); + const result = strategy.getAvailableSpace({ path: `${base}nested/nested2/file.txt` }); + await expect(result).rejects.toThrow('error'); + }); + }); +}); diff --git a/test/unit/quota/QuotaStrategy.test.ts b/test/unit/quota/QuotaStrategy.test.ts new file mode 100644 index 000000000..c43c62367 --- /dev/null +++ b/test/unit/quota/QuotaStrategy.test.ts @@ -0,0 +1,88 @@ +import { RepresentationMetadata } from '../../../src/http/representation/RepresentationMetadata'; +import { QuotaStrategy } from '../../../src/storage/quota/QuotaStrategy'; +import { UNIT_BYTES } from '../../../src/storage/size-reporter/Size'; +import type { Size } from '../../../src/storage/size-reporter/Size'; +import type { SizeReporter } from 
'../../../src/storage/size-reporter/SizeReporter'; +import { guardedStreamFrom, pipeSafely } from '../../../src/util/StreamUtil'; +import { mockFs } from '../../util/Util'; + +jest.mock('fs'); + +class QuotaStrategyWrapper extends QuotaStrategy { + public constructor(reporter: SizeReporter, limit: Size) { + super(reporter, limit); + } + + public getAvailableSpace = async(): Promise => ({ unit: UNIT_BYTES, amount: 5 }); + protected getTotalSpaceUsed = async(): Promise => ({ unit: UNIT_BYTES, amount: 5 }); +} + +describe('A QuotaStrategy', (): void => { + let strategy: QuotaStrategyWrapper; + let mockSize: Size; + let mockReporter: jest.Mocked>; + const base = 'http://localhost:3000/'; + const rootFilePath = 'folder'; + + beforeEach((): void => { + jest.restoreAllMocks(); + mockFs(rootFilePath, new Date()); + mockSize = { amount: 2000, unit: UNIT_BYTES }; + mockReporter = { + getSize: jest.fn().mockResolvedValue({ unit: mockSize.unit, amount: 50 }), + getUnit: jest.fn().mockReturnValue(mockSize.unit), + calculateChunkSize: jest.fn(async(chunk: any): Promise => chunk.length), + estimateSize: jest.fn().mockResolvedValue(5), + }; + strategy = new QuotaStrategyWrapper(mockReporter, mockSize); + }); + + describe('constructor()', (): void => { + it('should set the passed parameters as properties.', async(): Promise => { + expect(strategy.limit).toEqual(mockSize); + expect(strategy.reporter).toEqual(mockReporter); + }); + }); + + describe('estimateSize()', (): void => { + it('should return a Size object containing the correct unit and amount.', async(): Promise => { + await expect(strategy.estimateSize(new RepresentationMetadata())).resolves.toEqual( + // This '5' comes from the reporter mock a little up in this file + expect.objectContaining({ unit: mockSize.unit, amount: 5 }), + ); + }); + it('should return undefined when the reporter returns undefined.', async(): Promise => { + mockReporter.estimateSize.mockResolvedValueOnce(undefined); + await 
expect(strategy.estimateSize(new RepresentationMetadata())).resolves.toBeUndefined(); + }); + }); + + describe('createQuotaGuard()', (): void => { + it('should return a passthrough that destroys the stream when quota is exceeded.', async(): Promise => { + strategy.getAvailableSpace = jest.fn().mockReturnValue({ amount: 50, unit: mockSize.unit }); + const fiftyChars = 'A'.repeat(50); + const stream = guardedStreamFrom(fiftyChars); + const track = await strategy.createQuotaGuard({ path: `${base}nested/file2.txt` }); + const piped = pipeSafely(stream, track); + + for (let i = 0; i < 10; i++) { + stream.push(fiftyChars); + } + + expect(piped.destroyed).toBe(false); + + for (let i = 0; i < 10; i++) { + stream.push(fiftyChars); + } + + expect(piped.destroyed).toBe(false); + + stream.push(fiftyChars); + + const destroy = new Promise((resolve): void => { + piped.on('error', (): void => resolve()); + }); + await expect(destroy).resolves.toBeUndefined(); + }); + }); +}); diff --git a/test/unit/storage/accessors/AtomicFileDataAccessor.test.ts b/test/unit/storage/accessors/AtomicFileDataAccessor.test.ts new file mode 100644 index 000000000..15be725a8 --- /dev/null +++ b/test/unit/storage/accessors/AtomicFileDataAccessor.test.ts @@ -0,0 +1,97 @@ +import 'jest-rdf'; +import type { Readable } from 'stream'; +import { RepresentationMetadata } from '../../../../src/http/representation/RepresentationMetadata'; +import { AtomicFileDataAccessor } from '../../../../src/storage/accessors/AtomicFileDataAccessor'; +import { ExtensionBasedMapper } from '../../../../src/storage/mapping/ExtensionBasedMapper'; +import { APPLICATION_OCTET_STREAM } from '../../../../src/util/ContentTypes'; +import type { Guarded } from '../../../../src/util/GuardedStream'; +import { guardedStreamFrom } from '../../../../src/util/StreamUtil'; +import { CONTENT_TYPE } from '../../../../src/util/Vocabularies'; +import { mockFs } from '../../../util/Util'; + +jest.mock('fs'); + +describe('AtomicFileDataAccessor', 
(): void => { + const rootFilePath = 'uploads'; + const base = 'http://test.com/'; + let accessor: AtomicFileDataAccessor; + let cache: { data: any }; + let metadata: RepresentationMetadata; + let data: Guarded; + + beforeEach(async(): Promise => { + cache = mockFs(rootFilePath, new Date()); + accessor = new AtomicFileDataAccessor( + new ExtensionBasedMapper(base, rootFilePath), + rootFilePath, + './.internal/tempFiles/', + ); + // The 'mkdirSync' in AtomicFileDataAccessor's constructor does not seem to create the folder in the + // cache object used for mocking fs. + // This line creates what represents a folder in the cache object + cache.data['.internal'] = { tempFiles: {}}; + metadata = new RepresentationMetadata(APPLICATION_OCTET_STREAM); + data = guardedStreamFrom([ 'data' ]); + }); + + describe('writing a document', (): void => { + it('writes the data to the corresponding file.', async(): Promise => { + await expect(accessor.writeDocument({ path: `${base}resource` }, data, metadata)).resolves.toBeUndefined(); + expect(cache.data.resource).toBe('data'); + }); + + it('writes metadata to the corresponding metadata file.', async(): Promise => { + metadata = new RepresentationMetadata({ path: `${base}res.ttl` }, + { [CONTENT_TYPE]: 'text/turtle', likes: 'apples' }); + await expect(accessor.writeDocument({ path: `${base}res.ttl` }, data, metadata)).resolves.toBeUndefined(); + expect(cache.data['res.ttl']).toBe('data'); + expect(cache.data['res.ttl.meta']).toMatch(`<${base}res.ttl> "apples".`); + }); + + it('should delete temp file when done writing.', async(): Promise => { + await expect(accessor.writeDocument({ path: `${base}resource` }, data, metadata)).resolves.toBeUndefined(); + expect(Object.keys(cache.data['.internal'].tempFiles)).toHaveLength(0); + expect(cache.data.resource).toBe('data'); + }); + + it('should throw an error when writing the data goes wrong.', async(): Promise => { + data.read = jest.fn((): any => { + data.emit('error', new Error('error')); 
+ return null; + }); + jest.requireMock('fs').promises.stat = jest.fn((): any => ({ + isFile: (): boolean => false, + })); + await expect(accessor.writeDocument({ path: `${base}res.ttl` }, data, metadata)).rejects.toThrow('error'); + }); + + it('should throw when renaming / moving the file goes wrong.', async(): Promise => { + jest.requireMock('fs').promises.rename = jest.fn((): any => { + throw new Error('error'); + }); + jest.requireMock('fs').promises.stat = jest.fn((): any => ({ + isFile: (): boolean => true, + })); + await expect(accessor.writeDocument({ path: `${base}res.ttl` }, data, metadata)).rejects.toThrow('error'); + }); + + it('should (on error) not unlink the temp file if it does not exist.', async(): Promise => { + jest.requireMock('fs').promises.rename = jest.fn((): any => { + throw new Error('error'); + }); + jest.requireMock('fs').promises.stat = jest.fn((): any => ({ + isFile: (): boolean => false, + })); + await expect(accessor.writeDocument({ path: `${base}res.ttl` }, data, metadata)).rejects.toThrow('error'); + }); + + it('should throw when renaming / moving the file goes wrong and the temp file does not exist.', + async(): Promise => { + jest.requireMock('fs').promises.rename = jest.fn((): any => { + throw new Error('error'); + }); + jest.requireMock('fs').promises.stat = jest.fn(); + await expect(accessor.writeDocument({ path: `${base}res.ttl` }, data, metadata)).rejects.toThrow('error'); + }); + }); +}); diff --git a/test/unit/storage/accessors/PassthroughDataAccessor.test.ts b/test/unit/storage/accessors/PassthroughDataAccessor.test.ts new file mode 100644 index 000000000..923d8b67f --- /dev/null +++ b/test/unit/storage/accessors/PassthroughDataAccessor.test.ts @@ -0,0 +1,80 @@ +import { BasicRepresentation } from '../../../../src/http/representation/BasicRepresentation'; +import { RepresentationMetadata } from '../../../../src/http/representation/RepresentationMetadata'; +import type { DataAccessor } from 
'../../../../src/storage/accessors/DataAccessor'; +import { PassthroughDataAccessor } from '../../../../src/storage/accessors/PassthroughDataAccessor'; +import { guardedStreamFrom } from '../../../../src/util/StreamUtil'; + +describe('PassthroughDataAccessor', (): void => { + let passthrough: PassthroughDataAccessor; + let childAccessor: jest.Mocked; + + const mockIdentifier = { path: 'http://localhost/test.txt' }; + const mockMetadata = new RepresentationMetadata(); + const mockData = guardedStreamFrom('test string'); + const mockRepresentation = new BasicRepresentation(mockData, mockMetadata); + + beforeEach(async(): Promise => { + jest.clearAllMocks(); + childAccessor = { + canHandle: jest.fn(), + writeDocument: jest.fn(), + getData: jest.fn(), + getChildren: jest.fn(), + writeContainer: jest.fn(), + deleteResource: jest.fn(), + getMetadata: jest.fn(), + }; + childAccessor.getChildren = jest.fn(); + passthrough = new PassthroughDataAccessor(childAccessor); + }); + + describe('writeDocument()', (): void => { + it('should call the accessors writeDocument() function.', async(): Promise => { + await passthrough.writeDocument(mockIdentifier, mockData, mockMetadata); + expect(childAccessor.writeDocument).toHaveBeenCalledTimes(1); + expect(childAccessor.writeDocument).toHaveBeenCalledWith(mockIdentifier, mockData, mockMetadata); + }); + }); + describe('canHandle()', (): void => { + it('should call the accessors canHandle() function.', async(): Promise => { + await passthrough.canHandle(mockRepresentation); + expect(childAccessor.canHandle).toHaveBeenCalledTimes(1); + expect(childAccessor.canHandle).toHaveBeenCalledWith(mockRepresentation); + }); + }); + describe('getData()', (): void => { + it('should call the accessors getData() function.', async(): Promise => { + await passthrough.getData(mockIdentifier); + expect(childAccessor.getData).toHaveBeenCalledTimes(1); + expect(childAccessor.getData).toHaveBeenCalledWith(mockIdentifier); + }); + }); + 
describe('getMetadata()', (): void => { + it('should call the accessors getMetadata() function.', async(): Promise => { + await passthrough.getMetadata(mockIdentifier); + expect(childAccessor.getMetadata).toHaveBeenCalledTimes(1); + expect(childAccessor.getMetadata).toHaveBeenCalledWith(mockIdentifier); + }); + }); + describe('getChildren()', (): void => { + it('should call the accessors getChildren() function.', async(): Promise => { + passthrough.getChildren(mockIdentifier); + expect(childAccessor.getChildren).toHaveBeenCalledTimes(1); + expect(childAccessor.getChildren).toHaveBeenCalledWith(mockIdentifier); + }); + }); + describe('deleteResource()', (): void => { + it('should call the accessors deleteResource() function.', async(): Promise => { + await passthrough.deleteResource(mockIdentifier); + expect(childAccessor.deleteResource).toHaveBeenCalledTimes(1); + expect(childAccessor.deleteResource).toHaveBeenCalledWith(mockIdentifier); + }); + }); + describe('writeContainer()', (): void => { + it('should call the accessors writeContainer() function.', async(): Promise => { + await passthrough.writeContainer(mockIdentifier, mockMetadata); + expect(childAccessor.writeContainer).toHaveBeenCalledTimes(1); + expect(childAccessor.writeContainer).toHaveBeenCalledWith(mockIdentifier, mockMetadata); + }); + }); +}); diff --git a/test/unit/storage/accessors/ValidatingDataAccessor.test.ts b/test/unit/storage/accessors/ValidatingDataAccessor.test.ts new file mode 100644 index 000000000..645526c63 --- /dev/null +++ b/test/unit/storage/accessors/ValidatingDataAccessor.test.ts @@ -0,0 +1,54 @@ +import type { Validator, ValidatorInput } from '../../../../src/http/auxiliary/Validator'; +import { BasicRepresentation } from '../../../../src/http/representation/BasicRepresentation'; +import type { Representation } from '../../../../src/http/representation/Representation'; +import { RepresentationMetadata } from '../../../../src/http/representation/RepresentationMetadata'; +import 
type { DataAccessor } from '../../../../src/storage/accessors/DataAccessor'; +import { ValidatingDataAccessor } from '../../../../src/storage/accessors/ValidatingDataAccessor'; +import { guardedStreamFrom } from '../../../../src/util/StreamUtil'; + +describe('ValidatingDataAccessor', (): void => { + let validatingAccessor: ValidatingDataAccessor; + let childAccessor: jest.Mocked; + let validator: jest.Mocked; + + const mockIdentifier = { path: 'http://localhost/test.txt' }; + const mockMetadata = new RepresentationMetadata(); + const mockData = guardedStreamFrom('test string'); + const mockRepresentation = new BasicRepresentation(mockData, mockMetadata); + + beforeEach(async(): Promise => { + jest.clearAllMocks(); + childAccessor = { + writeDocument: jest.fn(), + writeContainer: jest.fn(), + } as any; + childAccessor.getChildren = jest.fn(); + validator = { + handleSafe: jest.fn(async(input: ValidatorInput): Promise => input.representation), + } as any; + validatingAccessor = new ValidatingDataAccessor(childAccessor, validator); + }); + + describe('writeDocument()', (): void => { + it('should call the validator\'s handleSafe() function.', async(): Promise => { + await validatingAccessor.writeDocument(mockIdentifier, mockData, mockMetadata); + expect(validator.handleSafe).toHaveBeenCalledTimes(1); + expect(validator.handleSafe).toHaveBeenCalledWith({ + representation: mockRepresentation, + identifier: mockIdentifier, + }); + }); + it('should call the accessors writeDocument() function.', async(): Promise => { + await validatingAccessor.writeDocument(mockIdentifier, mockData, mockMetadata); + expect(childAccessor.writeDocument).toHaveBeenCalledTimes(1); + expect(childAccessor.writeDocument).toHaveBeenCalledWith(mockIdentifier, mockData, mockMetadata); + }); + }); + describe('writeContainer()', (): void => { + it('should call the accessors writeContainer() function.', async(): Promise => { + await validatingAccessor.writeContainer(mockIdentifier, mockMetadata); + 
expect(childAccessor.writeContainer).toHaveBeenCalledTimes(1); + expect(childAccessor.writeContainer).toHaveBeenCalledWith(mockIdentifier, mockMetadata); + }); + }); +}); diff --git a/test/unit/storage/size-reporter/FileSizeReporter.test.ts b/test/unit/storage/size-reporter/FileSizeReporter.test.ts new file mode 100644 index 000000000..b471cb1b9 --- /dev/null +++ b/test/unit/storage/size-reporter/FileSizeReporter.test.ts @@ -0,0 +1,132 @@ +import { promises as fsPromises } from 'fs'; +import { RepresentationMetadata } from '../../../../src/http/representation/RepresentationMetadata'; +import type { ResourceIdentifier } from '../../../../src/http/representation/ResourceIdentifier'; +import type { FileIdentifierMapper, ResourceLink } from '../../../../src/storage/mapping/FileIdentifierMapper'; +import { FileSizeReporter } from '../../../../src/storage/size-reporter/FileSizeReporter'; +import { UNIT_BYTES } from '../../../../src/storage/size-reporter/Size'; +import { joinFilePath } from '../../../../src/util/PathUtil'; +import { mockFs } from '../../../util/Util'; + +jest.mock('fs'); + +describe('A FileSizeReporter', (): void => { + // Folder size is fixed to 4 in the mock + const folderSize = 4; + const mapper: jest.Mocked = { + mapFilePathToUrl: jest.fn(), + mapUrlToFilePath: jest.fn().mockImplementation((id: ResourceIdentifier): ResourceLink => ({ + filePath: id.path, + identifier: id, + isMetadata: false, + })), + }; + const fileRoot = joinFilePath(process.cwd(), '/test-folder/'); + const fileSizeReporter = new FileSizeReporter( + mapper, + fileRoot, + [ '^/\\.internal$' ], + ); + + beforeEach(async(): Promise => { + mockFs(fileRoot); + }); + + it('should work without the ignoreFolders constructor parameter.', async(): Promise => { + const tempFileSizeReporter = new FileSizeReporter( + mapper, + fileRoot, + ); + + const testFile = joinFilePath(fileRoot, '/test.txt'); + await fsPromises.writeFile(testFile, 'A'.repeat(20)); + + const result = 
tempFileSizeReporter.getSize({ path: testFile }); + await expect(result).resolves.toBeDefined(); + expect((await result).amount).toBe(20); + }); + + it('should report the right file size.', async(): Promise => { + const testFile = joinFilePath(fileRoot, '/test.txt'); + await fsPromises.writeFile(testFile, 'A'.repeat(20)); + + const result = fileSizeReporter.getSize({ path: testFile }); + await expect(result).resolves.toBeDefined(); + expect((await result).amount).toBe(20); + }); + + it('should work recursively.', async(): Promise => { + const containerFile = joinFilePath(fileRoot, '/test-folder-1/'); + await fsPromises.mkdir(containerFile, { recursive: true }); + const testFile = joinFilePath(containerFile, '/test.txt'); + await fsPromises.writeFile(testFile, 'A'.repeat(20)); + + const fileSize = fileSizeReporter.getSize({ path: testFile }); + const containerSize = fileSizeReporter.getSize({ path: containerFile }); + + await expect(fileSize).resolves.toEqual(expect.objectContaining({ amount: 20 })); + await expect(containerSize).resolves.toEqual(expect.objectContaining({ amount: 20 + folderSize })); + }); + + it('should not count files located in an ignored folder.', async(): Promise => { + const containerFile = joinFilePath(fileRoot, '/test-folder-2/'); + await fsPromises.mkdir(containerFile, { recursive: true }); + const testFile = joinFilePath(containerFile, '/test.txt'); + await fsPromises.writeFile(testFile, 'A'.repeat(20)); + + const internalContainerFile = joinFilePath(fileRoot, '/.internal/'); + await fsPromises.mkdir(internalContainerFile, { recursive: true }); + const internalTestFile = joinFilePath(internalContainerFile, '/test.txt'); + await fsPromises.writeFile(internalTestFile, 'A'.repeat(30)); + + const fileSize = fileSizeReporter.getSize({ path: testFile }); + const containerSize = fileSizeReporter.getSize({ path: containerFile }); + const rootSize = fileSizeReporter.getSize({ path: fileRoot }); + + const expectedFileSize = 20; + const 
expectedContainerSize = 20 + folderSize; + const expectedRootSize = expectedContainerSize + folderSize; + + await expect(fileSize).resolves.toEqual(expect.objectContaining({ amount: expectedFileSize })); + await expect(containerSize).resolves.toEqual(expect.objectContaining({ amount: expectedContainerSize })); + await expect(rootSize).resolves.toEqual(expect.objectContaining({ amount: expectedRootSize })); + }); + + it('should have the unit in its return value.', async(): Promise => { + const testFile = joinFilePath(fileRoot, '/test2.txt'); + await fsPromises.writeFile(testFile, 'A'.repeat(20)); + + const result = fileSizeReporter.getSize({ path: testFile }); + await expect(result).resolves.toBeDefined(); + expect((await result).unit).toBe(UNIT_BYTES); + }); + + it('getUnit() should return UNIT_BYTES.', (): void => { + expect(fileSizeReporter.getUnit()).toBe(UNIT_BYTES); + }); + + it('should return 0 when the size of a non existent file is requested.', async(): Promise => { + const result = fileSizeReporter.getSize({ path: joinFilePath(fileRoot, '/test.txt') }); + await expect(result).resolves.toEqual(expect.objectContaining({ amount: 0 })); + }); + + it('should calculate the chunk size correctly.', async(): Promise => { + const testString = 'testesttesttesttest==testtest'; + const result = fileSizeReporter.calculateChunkSize(testString); + await expect(result).resolves.toEqual(testString.length); + }); + + describe('estimateSize()', (): void => { + it('should return the content-length.', async(): Promise => { + const metadata = new RepresentationMetadata(); + metadata.contentLength = 100; + await expect(fileSizeReporter.estimateSize(metadata)).resolves.toEqual(100); + }); + it( + 'should return undefined if no content-length is present in the metadata.', + async(): Promise => { + const metadata = new RepresentationMetadata(); + await expect(fileSizeReporter.estimateSize(metadata)).resolves.toBeUndefined(); + }, + ); + }); +}); diff --git 
a/test/unit/storage/validators/QuotaValidator.test.ts b/test/unit/storage/validators/QuotaValidator.test.ts new file mode 100644 index 000000000..e496f5842 --- /dev/null +++ b/test/unit/storage/validators/QuotaValidator.test.ts @@ -0,0 +1,120 @@ +import type { Readable } from 'stream'; +import { PassThrough } from 'stream'; +import type { ValidatorInput } from '../../../../src/http/auxiliary/Validator'; +import { BasicRepresentation } from '../../../../src/http/representation/BasicRepresentation'; +import { RepresentationMetadata } from '../../../../src/http/representation/RepresentationMetadata'; +import type { ResourceIdentifier } from '../../../../src/http/representation/ResourceIdentifier'; +import type { QuotaStrategy } from '../../../../src/storage/quota/QuotaStrategy'; +import { UNIT_BYTES } from '../../../../src/storage/size-reporter/Size'; +import type { SizeReporter } from '../../../../src/storage/size-reporter/SizeReporter'; +import { QuotaValidator } from '../../../../src/storage/validators/QuotaValidator'; +import { guardStream } from '../../../../src/util/GuardedStream'; +import type { Guarded } from '../../../../src/util/GuardedStream'; +import { guardedStreamFrom, readableToString } from '../../../../src/util/StreamUtil'; + +describe('QuotaValidator', (): void => { + let mockedStrategy: jest.Mocked; + let validator: QuotaValidator; + let identifier: ResourceIdentifier; + let mockMetadata: RepresentationMetadata; + let mockData: Guarded; + let mockInput: ValidatorInput; + let mockReporter: jest.Mocked>; + + beforeEach((): void => { + jest.clearAllMocks(); + identifier = { path: 'http://localhost/' }; + mockMetadata = new RepresentationMetadata(); + mockData = guardedStreamFrom([ 'test string' ]); + mockInput = { + representation: new BasicRepresentation(mockData, mockMetadata), + identifier, + }; + mockReporter = { + getSize: jest.fn(), + getUnit: jest.fn(), + calculateChunkSize: jest.fn(), + estimateSize: jest.fn().mockResolvedValue(8), + }; + 
mockedStrategy = { + reporter: mockReporter, + limit: { unit: UNIT_BYTES, amount: 8 }, + getAvailableSpace: jest.fn().mockResolvedValue({ unit: UNIT_BYTES, amount: 10 }), + estimateSize: jest.fn().mockResolvedValue({ unit: UNIT_BYTES, amount: 8 }), + createQuotaGuard: jest.fn().mockResolvedValue(guardStream(new PassThrough())), + } as any; + validator = new QuotaValidator(mockedStrategy); + }); + + describe('handle()', (): void => { + // Step 2 + it('should destroy the stream when estimated size is larger than the available size.', async(): Promise => { + mockedStrategy.estimateSize.mockResolvedValueOnce({ unit: UNIT_BYTES, amount: 11 }); + + const result = validator.handle(mockInput); + await expect(result).resolves.toBeDefined(); + const awaitedResult = await result; + + const prom = new Promise((resolve, reject): void => { + awaitedResult.data.on('error', (): void => resolve()); + awaitedResult.data.on('end', (): void => reject(new Error('reject'))); + }); + + // Consume the stream + await expect(readableToString(awaitedResult.data)) + .rejects.toThrow('Quota exceeded: Advertised Content-Length is'); + await expect(prom).resolves.toBeUndefined(); + }); + + // Step 3 + it('should destroy the stream when quota is exceeded during write.', async(): Promise => { + mockedStrategy.createQuotaGuard.mockResolvedValueOnce(guardStream(new PassThrough({ + async transform(this): Promise { + this.destroy(new Error('error')); + }, + }))); + + const result = validator.handle(mockInput); + await expect(result).resolves.toBeDefined(); + const awaitedResult = await result; + + const prom = new Promise((resolve, reject): void => { + awaitedResult.data.on('error', (): void => resolve()); + awaitedResult.data.on('end', (): void => reject(new Error('reject'))); + }); + + // Consume the stream + await expect(readableToString(awaitedResult.data)).rejects.toThrow('error'); + expect(mockedStrategy.createQuotaGuard).toHaveBeenCalledTimes(1); + await expect(prom).resolves.toBeUndefined(); + 
}); + + // Step 4 + it('should throw when quota were exceeded after stream was finished.', async(): Promise => { + const result = validator.handle(mockInput); + + // Putting this after the handle / before consuming the stream will only affect + // this function in the flush part of the code. + mockedStrategy.getAvailableSpace.mockResolvedValueOnce({ unit: UNIT_BYTES, amount: -100 }); + + await expect(result).resolves.toBeDefined(); + const awaitedResult = await result; + + const prom = new Promise((resolve, reject): void => { + awaitedResult.data.on('error', (): void => resolve()); + awaitedResult.data.on('end', (): void => reject(new Error('reject'))); + }); + + // Consume the stream + await expect(readableToString(awaitedResult.data)).rejects.toThrow('Quota exceeded after write completed'); + await expect(prom).resolves.toBeUndefined(); + }); + + it('should return a stream that is consumable without error if quota isn\'t exceeded.', async(): Promise => { + const result = validator.handle(mockInput); + await expect(result).resolves.toBeDefined(); + const awaitedResult = await result; + await expect(readableToString(awaitedResult.data)).resolves.toBe('test string'); + }); + }); +}); diff --git a/test/unit/util/errors/HttpError.test.ts b/test/unit/util/errors/HttpError.test.ts index 5853df215..c62e6d3ef 100644 --- a/test/unit/util/errors/HttpError.test.ts +++ b/test/unit/util/errors/HttpError.test.ts @@ -7,6 +7,7 @@ import { InternalServerError } from '../../../../src/util/errors/InternalServerE import { MethodNotAllowedHttpError } from '../../../../src/util/errors/MethodNotAllowedHttpError'; import { NotFoundHttpError } from '../../../../src/util/errors/NotFoundHttpError'; import { NotImplementedHttpError } from '../../../../src/util/errors/NotImplementedHttpError'; +import { PayloadHttpError } from '../../../../src/util/errors/PayloadHttpError'; import { PreconditionFailedHttpError } from '../../../../src/util/errors/PreconditionFailedHttpError'; import { 
UnauthorizedHttpError } from '../../../../src/util/errors/UnauthorizedHttpError'; import { UnsupportedMediaTypeHttpError } from '../../../../src/util/errors/UnsupportedMediaTypeHttpError'; @@ -27,6 +28,7 @@ describe('HttpError', (): void => { [ 'MethodNotAllowedHttpError', 405, MethodNotAllowedHttpError ], [ 'ConflictHttpError', 409, ConflictHttpError ], [ 'PreconditionFailedHttpError', 412, PreconditionFailedHttpError ], + [ 'PayloadHttpError', 413, PayloadHttpError ], [ 'UnsupportedMediaTypeHttpError', 415, UnsupportedMediaTypeHttpError ], [ 'InternalServerError', 500, InternalServerError ], [ 'NotImplementedHttpError', 501, NotImplementedHttpError ], diff --git a/test/util/Util.ts b/test/util/Util.ts index 40e652c00..9b6ac63f8 100644 --- a/test/util/Util.ts +++ b/test/util/Util.ts @@ -19,6 +19,8 @@ const portNames = [ 'SparqlStorage', 'Subdomains', 'WebSocketsProtocol', + 'PodQuota', + 'GlobalQuota', // Unit 'BaseHttpServerFactory', ] as const; @@ -122,7 +124,7 @@ export function mockFs(rootFilepath?: string, time?: Date): { data: any } { isFile: (): boolean => typeof folder[name] === 'string', isDirectory: (): boolean => typeof folder[name] === 'object', isSymbolicLink: (): boolean => typeof folder[name] === 'symbol', - size: typeof folder[name] === 'string' ? folder[name].length : 0, + size: typeof folder[name] === 'string' ? 
folder[name].length : 4, mtime: time, } as Stats; }, @@ -199,6 +201,21 @@ export function mockFs(rootFilepath?: string, time?: Date): { data: any } { const { folder, name } = getFolder(path); folder[name] = data; }, + async rename(path: string, destination: string): Promise { + const { folder, name } = getFolder(path); + if (!folder[name]) { + throwSystemError('ENOENT'); + } + if (!(await this.lstat(path)).isFile()) { + throwSystemError('EISDIR'); + } + + const { folder: folderDest, name: nameDest } = getFolder(destination); + folderDest[nameDest] = folder[name]; + + // eslint-disable-next-line @typescript-eslint/no-dynamic-delete + delete folder[name]; + }, }, };