feat: Add support for quota limits

* feat: implemented SizeReporter and FileSizeReporter

* test: FileSizeReporter tests

* feat: added QuotaDataAccessor

* test: added extra test to check recursive behavior of FileSizeReporter

* feat: added QuotaStrategy interface

* feat: further progress in different files

* feat: wrote doc, tests and improved code

* feat: fixed bugs and code is now runnable and buildable

* feat: finished implementation

* fix: revert accidental changes

* fix: FileSizeReporter did not count container size

* fix: bug calculating container sizes fixed

* test: FileSizeReporter tests

* test: QuotaDataValidator tests

* test: QuotaError tests

* fix: removed console.log

* doc: added doc to several files

* doc: changed doc for QuotaStrategy to new implementation

* fix: improved content length regex

* feat: improved GlobalQuotaStrategy code

* fix: made FileSizeReporter readonly

* feat: added comments to quota-file.json

* fix: changed default tempFilePath variable

* test: included new tempFilePath variable in testing

* chore: created separate command for start:file:quota to pass tests

* feat: removed all sync fs calls from FileSizeReporter

* feat: minor changes in multiple files

* fix: changed function signatures to be in line with others

* feat: optimized quota data validation

* feat: improved FileSizeReporter code

* fix: corrected calculation of container sizes and fixed erroring edge case

* feat: save content-length as number in metadata

* feat: added comments and changed GlobalQuotaStrategy constructor

* feat: changed file names and added small comment

* test: AtomicFileDataAccessor tests

* test: completed FileSizeReporter tests

* fix: content-length is now saved correctly in RepresentationMetadata

* feat: adapted content length metadata + tests

* fix: removed tempFilePath variable

* fix: reverted .gitignore

* fix: forgot to remove tempFilePath variable from componentsjs config

* test: GlobalQuotaStrategy tests

* feat: replaced DataValidator with Validator

* feat: reworked DataValidator

* feat: added calculateChunkSize() to SizeReporter

* test: updated FileSizeReporter tests

* fix: tempFile location now relative to rootFilePath

* test: QuotaDataValidator tests

* fix: corrected FileSizeReporter tests

* fix: adapted FileSizeReporter tests

* fix: FileSizeReporter bug on Windows

* fix: regex linting error

* feat: changed Validator class

* feat: added PodQuotaStrategy to enable quota on a per-pod basis

* chore: bump context versions

* fix: Capitalized comments in json file

* chore: renamed ValidatorArgs to ValidatorInput

* chore: order all exports

* fix: made TODO comment clearer

* chore: added separate config files for global and pod-based quota + fixed comments

* chore: made minor changes to comments

* feat: added PassthroughDataAccessor

* feat: added PassthroughDataAccessor + tests

* fix: added invalid header check to ContentLengthParser

* chore: improved mocks

* chore: move quota limit higher up in config

* fix: atomicity issue in AtomicFileDataAccessor

* chore: moved .internal folder to config from FileSizeReporter

* fix: improved algorithm to ignore folders while calculating file size in FileSizeReporter

* fix: changes to support containers in the future

* fix: added error handling to prevent reading of nonexistent files

* feat: added generic type to SizeReporter to calculate chunk sizes

* test: use mocked DataAccessor

* chore: added some comments to test and made minor improvement

* fix: fs mock rename

* chore: QuotaStrategy.estimateSize refactor

* chore: move trackAvailableSpace to abstract class QuotaStrategy

* fix: improved test case

* test: quota integration tests

* chore: edited some comments

* chore: change lstat to stat

* feat: moved estimateSize to SizeReporter to be consistent with calculateChunkSize

* test: finish up tests to reach coverage

* fix: basic config

* fix: minor changes to test CI run

* fix: small fix for windows

* fix: improved writing to file

* chore: linting errors

* chore: rename trackAvailableSpace

* test: improved integration tests

* test: logging info for test debugging

* test: extra logging for debugging

* test: logging for debugging

* test: logging for debugging

* test: logging for debugging

* test: improved Quota integration test setup

* test: improve quota tests for CI run

* test: debugging Quota test

* test: uncommented global quota test

* test: changed global quota parameters

* test: logging for debugging

* test: logging cleanup

* chore: minor changes, mostly typo fixes

* chore: remove console.log

* fix: getting inconsistent results

* chore: try fix index.ts CI error

* chore: try fix CI error

* chore: try fix CI error

* chore: revert last commits

* chore: fix inconsistent files with origin

* test: minor test improvements

* chore: minor refactors and improvements

* fix: added extra try catch for breaking bug

* chore: improve config

* chore: minor code improvements

* test: use mockFs

* feat: add extra check in podQuotaStrategy

* chore: replace handle by handleSafe in ValidatingDataAccessor

* chore: typo

* test: improved Quota integration tests

* test: made comment in test more correct

* fix: rm -> rmdir for backwards compatibility

* fix: fsPromises issue

* chore: leave out irrelevant config

* chore: removed start script from package.json

* fix: Small fixes

Co-authored-by: Joachim Van Herwegen <joachimvh@gmail.com>
Arthur Joppart 2022-01-21 10:49:05 +01:00 committed by GitHub
parent 9a1f324685
commit 0cb4d7b161
47 changed files with 1927 additions and 20 deletions

View File

@ -4,6 +4,7 @@
### New features
- The Identity Provider now uses the `webid` scope as required for Solid-OIDC.
- The `VoidLocker` can be used to disable locking for development/testing purposes. This can be enabled by changing the `/config/util/resource-locker/` import to `debug-void.json`
- Added support for setting a quota on the server. See the `config/quota-file.json` config for an example.
### Configuration changes
You might need to make changes to your v2 configuration if you use a custom config.

View File

@ -2,6 +2,7 @@
"@context": "https://linkedsoftwaredependencies.org/bundles/npm/@solid/community-server/^2.0.0/components/context.jsonld",
"import": [
"files-scs:config/ldp/metadata-parser/parsers/content-type.json",
"files-scs:config/ldp/metadata-parser/parsers/content-length.json",
"files-scs:config/ldp/metadata-parser/parsers/slug.json",
"files-scs:config/ldp/metadata-parser/parsers/link.json"
],
@ -12,6 +13,7 @@
"@type": "ParallelHandler",
"handlers": [
{ "@id": "urn:solid-server:default:ContentTypeParser" },
{ "@id": "urn:solid-server:default:ContentLengthParser" },
{ "@id": "urn:solid-server:default:SlugParser" },
{ "@id": "urn:solid-server:default:LinkRelParser" }
]

View File

@ -0,0 +1,10 @@
{
"@context": "https://linkedsoftwaredependencies.org/bundles/npm/@solid/community-server/^2.0.0/components/context.jsonld",
"@graph": [
{
"comment": "Converts content-length headers into RDF metadata.",
"@id": "urn:solid-server:default:ContentLengthParser",
"@type": "ContentLengthParser"
}
]
}

config/quota-file.json (new file, 48 lines)
View File

@ -0,0 +1,48 @@
{
"@context": "https://linkedsoftwaredependencies.org/bundles/npm/@solid/community-server/^2.0.0/components/context.jsonld",
"import": [
"files-scs:config/app/main/default.json",
"files-scs:config/app/init/default.json",
"files-scs:config/app/setup/required.json",
"files-scs:config/http/handler/default.json",
"files-scs:config/http/middleware/websockets.json",
"files-scs:config/http/server-factory/websockets.json",
"files-scs:config/http/static/default.json",
"files-scs:config/identity/access/public.json",
"files-scs:config/identity/email/default.json",
"files-scs:config/identity/handler/default.json",
"files-scs:config/identity/ownership/token.json",
"files-scs:config/identity/pod/static.json",
"files-scs:config/identity/registration/enabled.json",
"files-scs:config/ldp/authentication/dpop-bearer.json",
"files-scs:config/ldp/authorization/allow-all.json",
"files-scs:config/ldp/handler/default.json",
"files-scs:config/ldp/metadata-parser/default.json",
"files-scs:config/ldp/metadata-writer/default.json",
"files-scs:config/ldp/modes/default.json",
"files-scs:config/storage/backend/pod-quota-file.json",
"files-scs:config/storage/key-value/resource-store.json",
"files-scs:config/storage/middleware/default.json",
"files-scs:config/util/auxiliary/acl.json",
"files-scs:config/util/identifiers/suffix.json",
"files-scs:config/util/index/default.json",
"files-scs:config/util/logging/winston.json",
"files-scs:config/util/representation-conversion/default.json",
"files-scs:config/util/resource-locker/memory.json",
"files-scs:config/util/variables/default.json"
],
"@graph": [
{
"comment": "A server that stores its resources on disk while enforcing quota."
},
{
"@id": "urn:solid-server:default:QuotaStrategy",
"PodQuotaStrategy:_limit_amount": 7000,
"PodQuotaStrategy:_limit_unit": "bytes"
},
{
"@id": "urn:solid-server:default:SizeReporter",
"FileSizeReporter:_ignoreFolders": [ "^/\\.internal$" ]
}
]
}

View File

@ -5,7 +5,9 @@ Options related to how data and resources are stored.
The final part of the ResourceStore chain that handles data access.
* *dynamic*: The routing store used here is needed when using dynamic pod creation.
* *file*: Default setup with a file backend.
* *global-quota-file*: File backend with a global quota over the entire server.
* *memory*: Default setup with a memory backend.
* *pod-quota-file*: File backend with a max quota per pod.
* *regex*: Uses a different backend based on the container that is being used.
* *sparql*: Default setup with a SPARQL endpoint backend.
Also updates the converting store so all incoming data is transformed into quads.

View File

@ -0,0 +1,17 @@
{
"@context": "https://linkedsoftwaredependencies.org/bundles/npm/@solid/community-server/^2.0.0/components/context.jsonld",
"import": [
"files-scs:config/storage/backend/quota/global-quota-file.json",
"files-scs:config/storage/backend/quota/quota-file.json"
],
"@graph": [
{
"comment": "A global quota store setup with a file system backend.",
"@id": "urn:solid-server:default:ResourceStore_Backend",
"@type": "DataAccessorBasedStore",
"identifierStrategy": { "@id": "urn:solid-server:default:IdentifierStrategy" },
"auxiliaryStrategy": { "@id": "urn:solid-server:default:AuxiliaryStrategy" },
"accessor": { "@id": "urn:solid-server:default:FileDataAccessor" }
}
]
}

View File

@ -0,0 +1,17 @@
{
"@context": "https://linkedsoftwaredependencies.org/bundles/npm/@solid/community-server/^2.0.0/components/context.jsonld",
"import": [
"files-scs:config/storage/backend/quota/pod-quota-file.json",
"files-scs:config/storage/backend/quota/quota-file.json"
],
"@graph": [
{
"comment": "A pod quota store setup with a file system backend.",
"@id": "urn:solid-server:default:ResourceStore_Backend",
"@type": "DataAccessorBasedStore",
"identifierStrategy": { "@id": "urn:solid-server:default:IdentifierStrategy" },
"auxiliaryStrategy": { "@id": "urn:solid-server:default:AuxiliaryStrategy" },
"accessor": { "@id": "urn:solid-server:default:FileDataAccessor" }
}
]
}

View File

@ -0,0 +1,13 @@
{
"@context": "https://linkedsoftwaredependencies.org/bundles/npm/@solid/community-server/^2.0.0/components/context.jsonld",
"comment": "Configuration of a GlobalQuotaStrategy to enforce quota globally on the server.",
"@graph": [
{
"comment": "Enforces quota globally for all data on the server",
"@id": "urn:solid-server:default:QuotaStrategy",
"@type": "GlobalQuotaStrategy",
"reporter": { "@id": "urn:solid-server:default:SizeReporter" },
"base": { "@id": "urn:solid-server:default:variable:baseUrl" }
}
]
}

View File

@ -0,0 +1,14 @@
{
"@context": "https://linkedsoftwaredependencies.org/bundles/npm/@solid/community-server/^2.0.0/components/context.jsonld",
"comment": "Configuration of a PodQuotaStrategy to enforce pod quotas on the server.",
"@graph": [
{
"comment": "Enforces quota for all data per pod on the server",
"@id": "urn:solid-server:default:QuotaStrategy",
"@type": "PodQuotaStrategy",
"reporter": { "@id": "urn:solid-server:default:SizeReporter" },
"accessor": { "@id": "urn:solid-server:default:AtomicFileDataAccessor" },
"identifierStrategy": { "@id": "urn:solid-server:default:IdentifierStrategy" }
}
]
}

View File

@ -0,0 +1,37 @@
{
"@context": "https://linkedsoftwaredependencies.org/bundles/npm/@solid/community-server/^2.0.0/components/context.jsonld",
"comment": "DataAccessor configuration using a QuotaStrategy to enforce quota on the server.",
"@graph": [
{
"comment": "DataAccessor that writes data to the disk with atomicity in mind",
"@id": "urn:solid-server:default:AtomicFileDataAccessor",
"@type": "AtomicFileDataAccessor",
"resourceMapper": { "@id": "urn:solid-server:default:FileIdentifierMapper" },
"rootFilePath": { "@id": "urn:solid-server:default:variable:rootFilePath" },
"tempFilePath": "/.internal/tempFiles/"
},
{
"comment": "Calculates the space already taken up by a resource",
"@id": "urn:solid-server:default:SizeReporter",
"@type": "FileSizeReporter",
"fileIdentifierMapper": { "@id": "urn:solid-server:default:FileIdentifierMapper" },
"rootFilePath": { "@id": "urn:solid-server:default:variable:rootFilePath" }
},
{
"comment": "Validates the data being written to the server",
"@id": "urn:solid-server:default:QuotaValidator",
"@type": "QuotaValidator",
"strategy": { "@id": "urn:solid-server:default:QuotaStrategy" }
},
{
"comment": "Simple wrapper for another DataAccessor but adds validation",
"@id": "urn:solid-server:default:FileDataAccessor",
"@type": "ValidatingDataAccessor",
"accessor": { "@id": "urn:solid-server:default:AtomicFileDataAccessor" },
"validator": { "@id": "urn:solid-server:default:QuotaValidator" }
}
]
}

View File

@ -58,7 +58,10 @@ export class ComposedAuxiliaryStrategy implements AuxiliaryStrategy {
public async validate(representation: Representation): Promise<void> {
if (this.validator) {
return this.validator.handleSafe(representation);
await this.validator.handleSafe({
representation,
identifier: { path: representation.metadata.identifier.value },
});
}
}
}

View File

@ -3,6 +3,7 @@ import type { RepresentationConverter } from '../../storage/conversion/Represent
import { INTERNAL_QUADS } from '../../util/ContentTypes';
import { cloneRepresentation } from '../../util/ResourceUtil';
import type { Representation } from '../representation/Representation';
import type { ValidatorInput } from './Validator';
import { Validator } from './Validator';
/**
@ -17,12 +18,11 @@ export class RdfValidator extends Validator {
this.converter = converter;
}
public async handle(representation: Representation): Promise<void> {
public async handle({ representation, identifier }: ValidatorInput): Promise<Representation> {
// If the data already is quads format we know it's RDF
if (representation.metadata.contentType === INTERNAL_QUADS) {
return;
return representation;
}
const identifier = { path: representation.metadata.identifier.value };
const preferences = { type: { [INTERNAL_QUADS]: 1 }};
let result;
try {
@ -39,5 +39,7 @@ export class RdfValidator extends Validator {
}
// Drain stream to make sure data was parsed correctly
await arrayifyStream(result.data);
return representation;
}
}

View File

@ -1,7 +1,13 @@
import { AsyncHandler } from '../../util/handlers/AsyncHandler';
import type { Representation } from '../representation/Representation';
import type { ResourceIdentifier } from '../representation/ResourceIdentifier';
export type ValidatorInput = {
representation: Representation;
identifier: ResourceIdentifier;
};
/**
* Generic interface for classes that validate Representations in some way.
*/
export abstract class Validator extends AsyncHandler<Representation> { }
export abstract class Validator extends AsyncHandler<ValidatorInput, Representation> { }
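
A minimal sketch of a validator under the new input/output signature, shown for illustration only; the class name is hypothetical and not part of this change:

class NoopValidator extends Validator {
  // Accepts every representation and passes it through unchanged.
  public async handle({ representation }: ValidatorInput): Promise<Representation> {
    return representation;
  }
}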

View File

@ -0,0 +1,23 @@
import { getLoggerFor } from '../../../logging/LogUtil';
import type { HttpRequest } from '../../../server/HttpRequest';
import type { RepresentationMetadata } from '../../representation/RepresentationMetadata';
import { MetadataParser } from './MetadataParser';
/**
* Parser for the `content-length` header.
*/
export class ContentLengthParser extends MetadataParser {
protected readonly logger = getLoggerFor(this);
public async handle(input: { request: HttpRequest; metadata: RepresentationMetadata }): Promise<void> {
const contentLength = input.request.headers['content-length'];
if (contentLength) {
const length = /^\s*(\d+)\s*(?:;.*)?$/u.exec(contentLength)?.[1];
if (length) {
input.metadata.contentLength = Number(length);
} else {
this.logger.warn(`Invalid content-length header found: ${contentLength}.`);
}
}
}
}
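
A small illustration of what the content-length regex above accepts; the header values below are made-up examples, not taken from the test suite:

const pattern = /^\s*(\d+)\s*(?:;.*)?$/u;
pattern.exec(' 42 ')?.[1];           // '42': surrounding whitespace is allowed
pattern.exec('100;charset=x')?.[1];  // '100': anything after ';' is ignored
pattern.exec('abc');                 // null: logged as an invalid content-length header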

View File

@ -2,8 +2,8 @@ import { DataFactory, Store } from 'n3';
import type { BlankNode, DefaultGraph, Literal, NamedNode, Quad, Term } from 'rdf-js';
import { getLoggerFor } from '../../logging/LogUtil';
import { InternalServerError } from '../../util/errors/InternalServerError';
import { toNamedTerm, toObjectTerm, toCachedNamedNode, isTerm } from '../../util/TermUtil';
import { CONTENT_TYPE, CONTENT_TYPE_TERM } from '../../util/Vocabularies';
import { toNamedTerm, toObjectTerm, toCachedNamedNode, isTerm, toLiteral } from '../../util/TermUtil';
import { CONTENT_TYPE, CONTENT_TYPE_TERM, CONTENT_LENGTH_TERM, XSD } from '../../util/Vocabularies';
import type { ResourceIdentifier } from './ResourceIdentifier';
import { isResourceIdentifier } from './ResourceIdentifier';
@ -316,4 +316,18 @@ export class RepresentationMetadata {
public set contentType(input) {
this.set(CONTENT_TYPE_TERM, input);
}
/**
* Shorthand for the CONTENT_LENGTH predicate.
*/
public get contentLength(): number | undefined {
const length = this.get(CONTENT_LENGTH_TERM);
return length?.value ? Number(length.value) : undefined;
}
public set contentLength(input) {
if (input) {
this.set(CONTENT_LENGTH_TERM, toLiteral(input, XSD.terms.integer));
}
}
}
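
A brief usage sketch of the new shorthand, assuming a metadata object created without arguments:

const metadata = new RepresentationMetadata();
metadata.contentLength = 500;          // Stored as an xsd:integer literal on the content-length predicate.
const length = metadata.contentLength; // 500, read back as a number.
metadata.contentLength = undefined;    // The setter ignores falsy input, so nothing is written.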

View File

@ -21,10 +21,10 @@ export * from './authorization/permissions/MethodModesExtractor';
export * from './authorization/permissions/SparqlPatchModesExtractor';
// Authorization
export * from './authorization/OwnerPermissionReader';
export * from './authorization/AllStaticReader';
export * from './authorization/Authorizer';
export * from './authorization/AuxiliaryReader';
export * from './authorization/OwnerPermissionReader';
export * from './authorization/PathBasedReader';
export * from './authorization/PermissionBasedAuthorizer';
export * from './authorization/PermissionReader';
@ -57,6 +57,7 @@ export * from './http/input/identifier/OriginalUrlExtractor';
export * from './http/input/identifier/TargetExtractor';
// HTTP/Input/Metadata
export * from './http/input/metadata/ContentLengthParser';
export * from './http/input/metadata/ContentTypeParser';
export * from './http/input/metadata/LinkRelParser';
export * from './http/input/metadata/MetadataParser';
@ -248,10 +249,14 @@ export * from './server/util/RedirectAllHttpHandler';
export * from './server/util/RouterHandler';
// Storage/Accessors
export * from './storage/accessors/AtomicDataAccessor';
export * from './storage/accessors/AtomicFileDataAccessor';
export * from './storage/accessors/DataAccessor';
export * from './storage/accessors/FileDataAccessor';
export * from './storage/accessors/InMemoryDataAccessor';
export * from './storage/accessors/PassthroughDataAccessor';
export * from './storage/accessors/SparqlDataAccessor';
export * from './storage/accessors/ValidatingDataAccessor';
// Storage/Conversion
export * from './storage/conversion/BaseTypedRepresentationConverter';
@ -295,6 +300,11 @@ export * from './storage/patch/RepresentationPatcher';
export * from './storage/patch/RepresentationPatchHandler';
export * from './storage/patch/SparqlUpdatePatcher';
// Storage/Quota
export * from './storage/quota/GlobalQuotaStrategy';
export * from './storage/quota/PodQuotaStrategy';
export * from './storage/quota/QuotaStrategy';
// Storage/Routing
export * from './storage/routing/BaseUrlRouterRule';
export * from './storage/routing/ConvertingRouterRule';
@ -302,6 +312,14 @@ export * from './storage/routing/PreferenceSupport';
export * from './storage/routing/RegexRouterRule';
export * from './storage/routing/RouterRule';
// Storage/Size-Reporter
export * from './storage/size-reporter/FileSizeReporter';
export * from './storage/size-reporter/Size';
export * from './storage/size-reporter/SizeReporter';
// Storage/Validators
export * from './storage/validators/QuotaValidator';
// Storage
export * from './storage/AtomicResourceStore';
export * from './storage/BaseResourceStore';

View File

@ -0,0 +1,10 @@
import type { DataAccessor } from './DataAccessor';
/**
* The AtomicDataAccessor interface has the same function signatures as
* the DataAccessor, with the additional constraint that every function call
* must be atomic in its effect: either the call fully succeeds, reaching the
* desired new state; or it fails, upon which the resulting state remains
* identical to the one before the call.
*/
export interface AtomicDataAccessor extends DataAccessor { }

View File

@ -0,0 +1,62 @@
import { mkdirSync, promises as fsPromises } from 'fs';
import type { Readable } from 'stream';
import { v4 } from 'uuid';
import type { RepresentationMetadata } from '../../http/representation/RepresentationMetadata';
import type { ResourceIdentifier } from '../../http/representation/ResourceIdentifier';
import type { Guarded } from '../../util/GuardedStream';
import { joinFilePath } from '../../util/PathUtil';
import type { FileIdentifierMapper } from '../mapping/FileIdentifierMapper';
import type { AtomicDataAccessor } from './AtomicDataAccessor';
import { FileDataAccessor } from './FileDataAccessor';
/**
* AtomicDataAccessor that uses the file system to store documents as files and containers as folders.
* Data will first be written to a temporary location and only if no errors occur
* will the data be written to the desired location.
*/
export class AtomicFileDataAccessor extends FileDataAccessor implements AtomicDataAccessor {
private readonly tempFilePath: string;
public constructor(resourceMapper: FileIdentifierMapper, rootFilePath: string, tempFilePath: string) {
super(resourceMapper);
this.tempFilePath = joinFilePath(rootFilePath, tempFilePath);
// Cannot use fsPromises in constructor
mkdirSync(this.tempFilePath, { recursive: true });
}
/**
* Writes the given data as a file (and potential metadata as additional file).
* Data will first be written to a temporary file and only moved to the desired
* destination if no errors occur.
* If the stream errors, the temporary file is deleted.
* The metadata file will only be written if the data was written successfully.
*/
public async writeDocument(identifier: ResourceIdentifier, data: Guarded<Readable>, metadata: RepresentationMetadata):
Promise<void> {
const link = await this.resourceMapper.mapUrlToFilePath(identifier, false, metadata.contentType);
// Generate temporary file name
const tempFilePath = joinFilePath(this.tempFilePath, `temp-${v4()}.txt`);
try {
await this.writeDataFile(tempFilePath, data);
// Check if we already have a corresponding file with a different extension
await this.verifyExistingExtension(link);
// When no quota errors occur move the file to its desired location
await fsPromises.rename(tempFilePath, link.filePath);
} catch (error: unknown) {
// Delete the data already written
try {
if ((await this.getStats(tempFilePath)).isFile()) {
await fsPromises.unlink(tempFilePath);
}
} catch {
throw error;
}
throw error;
}
await this.writeMetadata(link, metadata);
}
}

View File

@ -22,7 +22,7 @@ import type { DataAccessor } from './DataAccessor';
* DataAccessor that uses the file system to store documents as files and containers as folders.
*/
export class FileDataAccessor implements DataAccessor {
private readonly resourceMapper: FileIdentifierMapper;
protected readonly resourceMapper: FileIdentifierMapper;
public constructor(resourceMapper: FileIdentifierMapper) {
this.resourceMapper = resourceMapper;
@ -149,7 +149,7 @@ export class FileDataAccessor implements DataAccessor {
* @throws NotFoundHttpError
* If the file/folder doesn't exist.
*/
private async getStats(path: string): Promise<Stats> {
protected async getStats(path: string): Promise<Stats> {
try {
return await fsPromises.stat(path);
} catch (error: unknown) {
@ -192,7 +192,7 @@ export class FileDataAccessor implements DataAccessor {
*
* @returns True if data was written to a file.
*/
private async writeMetadata(link: ResourceLink, metadata: RepresentationMetadata): Promise<boolean> {
protected async writeMetadata(link: ResourceLink, metadata: RepresentationMetadata): Promise<boolean> {
// These are stored by file system conventions
metadata.remove(RDF.terms.type, LDP.terms.Resource);
metadata.remove(RDF.terms.type, LDP.terms.Container);
@ -327,7 +327,7 @@ export class FileDataAccessor implements DataAccessor {
*
* @param link - ResourceLink corresponding to the new resource data.
*/
private async verifyExistingExtension(link: ResourceLink): Promise<void> {
protected async verifyExistingExtension(link: ResourceLink): Promise<void> {
try {
// Delete the old file with the (now) wrong extension
const oldLink = await this.resourceMapper.mapUrlToFilePath(link.identifier, false);
@ -347,11 +347,14 @@ export class FileDataAccessor implements DataAccessor {
* @param path - The filepath of the file to be created.
* @param data - The data to be put in the file.
*/
private async writeDataFile(path: string, data: Readable): Promise<void> {
protected async writeDataFile(path: string, data: Readable): Promise<void> {
return new Promise((resolve, reject): any => {
const writeStream = createWriteStream(path);
data.pipe(writeStream);
data.on('error', reject);
data.on('error', (error): void => {
reject(error);
writeStream.end();
});
writeStream.on('error', reject);
writeStream.on('finish', resolve);

View File

@ -0,0 +1,49 @@
import type { Readable } from 'stream';
import type { Representation } from '../../http/representation/Representation';
import type { RepresentationMetadata } from '../../http/representation/RepresentationMetadata';
import type { ResourceIdentifier } from '../../http/representation/ResourceIdentifier';
import type { Guarded } from '../../util/GuardedStream';
import type { AtomicDataAccessor } from './AtomicDataAccessor';
import type { DataAccessor } from './DataAccessor';
/**
* DataAccessor that calls the corresponding functions of the source DataAccessor.
* Can be extended by data accessors that do not want to override all functions
* by implementing a decorator pattern.
*/
export class PassthroughDataAccessor implements DataAccessor {
protected readonly accessor: AtomicDataAccessor;
public constructor(accessor: DataAccessor) {
this.accessor = accessor;
}
public async writeDocument(identifier: ResourceIdentifier, data: Guarded<Readable>, metadata: RepresentationMetadata):
Promise<void> {
return this.accessor.writeDocument(identifier, data, metadata);
}
public async writeContainer(identifier: ResourceIdentifier, metadata: RepresentationMetadata): Promise<void> {
return this.accessor.writeContainer(identifier, metadata);
}
public async canHandle(representation: Representation): Promise<void> {
return this.accessor.canHandle(representation);
}
public async getData(identifier: ResourceIdentifier): Promise<Guarded<Readable>> {
return this.accessor.getData(identifier);
}
public async getMetadata(identifier: ResourceIdentifier): Promise<RepresentationMetadata> {
return this.accessor.getMetadata(identifier);
}
public getChildren(identifier: ResourceIdentifier): AsyncIterableIterator<RepresentationMetadata> {
return this.accessor.getChildren(identifier);
}
public async deleteResource(identifier: ResourceIdentifier): Promise<void> {
return this.accessor.deleteResource(identifier);
}
}
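
Since every call is delegated, a subclass only needs to override the methods it wants to change. A hypothetical decorator that logs document writes, purely to illustrate the pattern mentioned above (it reuses the imports of the file in this diff):

class LoggingDataAccessor extends PassthroughDataAccessor {
  public async writeDocument(identifier: ResourceIdentifier, data: Guarded<Readable>,
    metadata: RepresentationMetadata): Promise<void> {
    // Only this call is customized; all other DataAccessor methods fall through to the wrapped accessor.
    console.log(`Writing document ${identifier.path}`);
    return super.writeDocument(identifier, data, metadata);
  }
}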

View File

@ -0,0 +1,40 @@
import type { Readable } from 'stream';
import type { Validator } from '../../http/auxiliary/Validator';
import { BasicRepresentation } from '../../http/representation/BasicRepresentation';
import type { RepresentationMetadata } from '../../http/representation/RepresentationMetadata';
import type { ResourceIdentifier } from '../../http/representation/ResourceIdentifier';
import type { Guarded } from '../../util/GuardedStream';
import type { DataAccessor } from './DataAccessor';
import { PassthroughDataAccessor } from './PassthroughDataAccessor';
/**
* A ValidatingDataAccessor wraps a DataAccessor such that the data stream is validated while being written.
* An AtomicDataAccessor can be used to prevent data being written in case validation fails.
*/
export class ValidatingDataAccessor extends PassthroughDataAccessor {
private readonly validator: Validator;
public constructor(accessor: DataAccessor, validator: Validator) {
super(accessor);
this.validator = validator;
}
public async writeDocument(
identifier: ResourceIdentifier,
data: Guarded<Readable>,
metadata: RepresentationMetadata,
): Promise<void> {
const pipedRep = await this.validator.handleSafe({
representation: new BasicRepresentation(data, metadata),
identifier,
});
return this.accessor.writeDocument(identifier, pipedRep.data, metadata);
}
public async writeContainer(identifier: ResourceIdentifier, metadata: RepresentationMetadata): Promise<void> {
// A container's data mainly resides in its metadata,
// whose disk size we can't calculate at this point in the code.
// Extra info can be found here: https://github.com/solid/community-server/pull/973#discussion_r723376888
return this.accessor.writeContainer(identifier, metadata);
}
}

View File

@ -0,0 +1,19 @@
import type { Size } from '../size-reporter/Size';
import type { SizeReporter } from '../size-reporter/SizeReporter';
import { QuotaStrategy } from './QuotaStrategy';
/**
* The GlobalQuotaStrategy sets a limit on the amount of data stored on the server globally.
*/
export class GlobalQuotaStrategy extends QuotaStrategy {
private readonly base: string;
public constructor(limit: Size, reporter: SizeReporter<any>, base: string) {
super(reporter, limit);
this.base = base;
}
protected async getTotalSpaceUsed(): Promise<Size> {
return this.reporter.getSize({ path: this.base });
}
}

View File

@ -0,0 +1,66 @@
import type { RepresentationMetadata } from '../../http/representation/RepresentationMetadata';
import type { ResourceIdentifier } from '../../http/representation/ResourceIdentifier';
import { NotFoundHttpError } from '../../util/errors/NotFoundHttpError';
import type { IdentifierStrategy } from '../../util/identifiers/IdentifierStrategy';
import { RDF, PIM } from '../../util/Vocabularies';
import type { DataAccessor } from '../accessors/DataAccessor';
import type { Size } from '../size-reporter/Size';
import type { SizeReporter } from '../size-reporter/SizeReporter';
import { QuotaStrategy } from './QuotaStrategy';
/**
* The PodQuotaStrategy sets a limit on the amount of data stored on a per-pod basis.
*/
export class PodQuotaStrategy extends QuotaStrategy {
private readonly identifierStrategy: IdentifierStrategy;
private readonly accessor: DataAccessor;
public constructor(
limit: Size,
reporter: SizeReporter<any>,
identifierStrategy: IdentifierStrategy,
accessor: DataAccessor,
) {
super(reporter, limit);
this.identifierStrategy = identifierStrategy;
this.accessor = accessor;
}
protected async getTotalSpaceUsed(identifier: ResourceIdentifier): Promise<Size> {
const pimStorage = await this.searchPimStorage(identifier);
// No storage was found containing this identifier, so we assume this identifier points to an internal location.
// Quota does not apply here so there is always available space.
if (!pimStorage) {
return { amount: Number.MAX_SAFE_INTEGER, unit: this.limit.unit };
}
return this.reporter.getSize(pimStorage);
}
/** Finds the closest parent container that has pim:storage as metadata */
private async searchPimStorage(identifier: ResourceIdentifier): Promise<ResourceIdentifier | undefined> {
if (this.identifierStrategy.isRootContainer(identifier)) {
return;
}
let metadata: RepresentationMetadata;
const parent = this.identifierStrategy.getParentContainer(identifier);
try {
metadata = await this.accessor.getMetadata(identifier);
} catch (error: unknown) {
if (error instanceof NotFoundHttpError) {
// Resource and/or its metadata do not exist
return this.searchPimStorage(parent);
}
throw error;
}
const hasPimStorageMetadata = metadata!.getAll(RDF.type)
.some((term): boolean => term.value === PIM.Storage);
return hasPimStorageMetadata ? identifier : this.searchPimStorage(parent);
}
}

View File

@ -0,0 +1,105 @@
// These two eslint lines are needed to store 'this' in a variable so it can be used
// in the PassThrough of createQuotaGuard
/* eslint-disable @typescript-eslint/no-this-alias */
/* eslint-disable consistent-this */
import { PassThrough } from 'stream';
import type { RepresentationMetadata } from '../../http/representation/RepresentationMetadata';
import type { ResourceIdentifier } from '../../http/representation/ResourceIdentifier';
import { PayloadHttpError } from '../../util/errors/PayloadHttpError';
import type { Guarded } from '../../util/GuardedStream';
import { guardStream } from '../../util/GuardedStream';
import type { Size } from '../size-reporter/Size';
import type { SizeReporter } from '../size-reporter/SizeReporter';
/**
* A QuotaStrategy is used when we want to set a limit to the amount of data that can be
* stored on the server.
* This can range from a limit for the whole server to a limit on a per pod basis.
* The way the size of a resource is calculated is implemented by the implementing classes.
* This can be bytes, quads, file count, ...
*/
export abstract class QuotaStrategy {
public readonly reporter: SizeReporter<any>;
public readonly limit: Size;
public constructor(reporter: SizeReporter<any>, limit: Size) {
this.reporter = reporter;
this.limit = limit;
}
/**
* Get the available space when writing data to the given identifier.
* If the given resource already exists it will deduct the already taken up
* space by that resource since it is going to be overwritten and thus counts
* as available space.
*
* @param identifier - the identifier of the resource of which you want the available space
* @returns the available space and the unit of the space as a Size object
*/
public async getAvailableSpace(identifier: ResourceIdentifier): Promise<Size> {
const totalUsed = await this.getTotalSpaceUsed(identifier);
// Ignore identifiers where quota does not apply
if (totalUsed.amount === Number.MAX_SAFE_INTEGER) {
return totalUsed;
}
// When a file is overwritten the space the file takes up right now should also
// be counted as available space as it will disappear/be overwritten
totalUsed.amount -= (await this.reporter.getSize(identifier)).amount;
return {
amount: this.limit.amount - totalUsed.amount,
unit: this.limit.unit,
};
}
/**
* Get the currently used/occupied space.
*
* @param identifier - the identifier that should be used to calculate the total
* @returns a Size object containing the requested value.
* If quota is not relevant for this identifier, Size.amount should be Number.MAX_SAFE_INTEGER
*/
protected abstract getTotalSpaceUsed(identifier: ResourceIdentifier): Promise<Size>;
/**
* Get an estimated size of the resource
*
* @param metadata - the metadata that might include the size
* @returns a Size object containing the estimated size and unit of the resource
*/
public async estimateSize(metadata: RepresentationMetadata): Promise<Size | undefined> {
const estimate = await this.reporter.estimateSize(metadata);
return estimate ? { unit: this.limit.unit, amount: estimate } : undefined;
}
/**
* Get a Passthrough stream that will keep track of the available space.
* If the quota is exceeded the stream will emit an error and destroy itself.
* Like other Passthrough instances this will simply pass on the chunks when the quota isn't exceeded.
*
* @param identifier - the identifier of the resource in question
* @returns a Passthrough instance that errors when quota is exceeded
*/
public async createQuotaGuard(identifier: ResourceIdentifier): Promise<Guarded<PassThrough>> {
let total = 0;
const strategy = this;
const { reporter } = this;
return guardStream(new PassThrough({
async transform(this, chunk: any, enc: string, done: () => void): Promise<void> {
total += await reporter.calculateChunkSize(chunk);
const availableSpace = await strategy.getAvailableSpace(identifier);
if (availableSpace.amount < total) {
this.destroy(new PayloadHttpError(
`Quota exceeded by ${total - availableSpace.amount} ${availableSpace.unit} during write`,
));
}
this.push(chunk);
done();
},
}));
}
}
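
A rough sketch of how a caller could combine the public methods above when writing a resource; the function name and source stream are placeholders, and the imports mirror those of the file above plus Readable from 'stream':

async function guardedWrite(strategy: QuotaStrategy, identifier: ResourceIdentifier,
  source: Guarded<Readable>): Promise<PassThrough> {
  const available = await strategy.getAvailableSpace(identifier);
  console.log(`${available.amount} ${available.unit} available before the quota is hit`);
  // The guard passes chunks through and destroys itself with a PayloadHttpError
  // as soon as the written data would exceed the quota.
  const guard = await strategy.createQuotaGuard(identifier);
  return source.pipe(guard);
}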

View File

@ -0,0 +1,87 @@
import type { Stats } from 'fs';
import { promises as fsPromises } from 'fs';
import type { RepresentationMetadata } from '../../http/representation/RepresentationMetadata';
import type { ResourceIdentifier } from '../../http/representation/ResourceIdentifier';
import { joinFilePath, normalizeFilePath, trimTrailingSlashes } from '../../util/PathUtil';
import type { FileIdentifierMapper } from '../mapping/FileIdentifierMapper';
import type { Size } from './Size';
import { UNIT_BYTES } from './Size';
import type { SizeReporter } from './SizeReporter';
/**
* SizeReporter that is used to calculate sizes of resources for a file based system.
*/
export class FileSizeReporter implements SizeReporter<string> {
private readonly fileIdentifierMapper: FileIdentifierMapper;
private readonly ignoreFolders: RegExp[];
private readonly rootFilePath: string;
public constructor(fileIdentifierMapper: FileIdentifierMapper, rootFilePath: string, ignoreFolders?: string[]) {
this.fileIdentifierMapper = fileIdentifierMapper;
this.ignoreFolders = ignoreFolders ? ignoreFolders.map((folder: string): RegExp => new RegExp(folder, 'u')) : [];
this.rootFilePath = normalizeFilePath(rootFilePath);
}
/** The FileSizeReporter will always return data in the form of bytes */
public getUnit(): string {
return UNIT_BYTES;
}
/**
* Returns the size of the given resource ( and its children ) in bytes
*/
public async getSize(identifier: ResourceIdentifier): Promise<Size> {
const fileLocation = (await this.fileIdentifierMapper.mapUrlToFilePath(identifier, false)).filePath;
return { unit: this.getUnit(), amount: await this.getTotalSize(fileLocation) };
}
public async calculateChunkSize(chunk: string): Promise<number> {
return chunk.length;
}
/** The estimated size of a resource in this reporter is simply the content-length header */
public async estimateSize(metadata: RepresentationMetadata): Promise<number | undefined> {
return metadata.contentLength;
}
/**
* Get the total size of a resource and its children if present
*
* @param fileLocation - the resource of which you want the total size ( on disk )
* @returns a number specifying how many bytes are used by the resource
*/
private async getTotalSize(fileLocation: string): Promise<number> {
let stat: Stats;
// Check if the file exists
try {
stat = await fsPromises.stat(fileLocation);
} catch {
return 0;
}
// If the file's location points to a file, simply return the file's size
if (stat.isFile()) {
return stat.size;
}
// If the location DOES exist and is NOT a file it should be a directory
// recursively add all sizes of children to the total
const childFiles = await fsPromises.readdir(fileLocation);
const rootFilePathLength = trimTrailingSlashes(this.rootFilePath).length;
return await childFiles.reduce(async(acc: Promise<number>, current): Promise<number> => {
const childFileLocation = normalizeFilePath(joinFilePath(fileLocation, current));
let result = await acc;
// Exclude internal files
if (!this.ignoreFolders.some((folder: RegExp): boolean =>
folder.test(childFileLocation.slice(rootFilePathLength)))) {
result += await this.getTotalSize(childFileLocation);
}
return result;
}, Promise.resolve(stat.size));
}
}

View File

@ -0,0 +1,9 @@
/**
* Describes the size of something by stating how much of a certain unit is present.
*/
export interface Size {
unit: string;
amount: number;
}
export const UNIT_BYTES = 'bytes';
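
For example, a two-kilobyte size expressed with the exported unit constant:

const twoKilobytes: Size = { unit: UNIT_BYTES, amount: 2048 };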

View File

@ -0,0 +1,44 @@
import type { RepresentationMetadata } from '../../http/representation/RepresentationMetadata';
import type { ResourceIdentifier } from '../../http/representation/ResourceIdentifier';
import type { Size } from './Size';
/**
* A SizeReporter's only purpose (at the moment) is to calculate the size
* of a resource. How the size is calculated or what unit it is in is defined by
* the class implementing this interface.
* One might use the amount of bytes and another might use the amount of triples
* stored in a resource.
*/
export interface SizeReporter<T> {
/**
* Get the unit as a string in which a SizeReporter returns data
*/
getUnit: () => string;
/**
* Get the size of a given resource
*
* @param identifier - the resource of which you want the size
* @returns The size of the resource as a Size object calculated recursively
* if the identifier leads to a container
*/
getSize: (identifier: ResourceIdentifier) => Promise<Size>;
/**
* Calculate the size of a chunk based on which SizeReporter is being used
*
* @param chunk - the chunk of which you want the size
* @returns the size of the passed chunk as a number
*/
calculateChunkSize: (chunk: T) => Promise<number>;
/**
* Estimate the size of a body / request by looking at its metadata
*
* @param metadata - the metadata of the resource you want an estimated size of
* @returns the estimated size of the body / request or undefined if no
* meaningful estimation can be made
*/
estimateSize: (metadata: RepresentationMetadata) => Promise<number | undefined>;
}
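
To make the contract concrete, a hypothetical in-memory implementation that measures sizes in characters; the class name and backing map are illustrative only, and the import paths assume a file placed next to this interface:

import type { RepresentationMetadata } from '../../http/representation/RepresentationMetadata';
import type { ResourceIdentifier } from '../../http/representation/ResourceIdentifier';
import type { Size } from './Size';
import type { SizeReporter } from './SizeReporter';

class MemorySizeReporter implements SizeReporter<string> {
  public constructor(private readonly contents: Map<string, string>) {}

  public getUnit(): string {
    return 'characters';
  }

  // The size of a resource is simply the length of the stored string (containers are ignored here).
  public async getSize(identifier: ResourceIdentifier): Promise<Size> {
    return { unit: this.getUnit(), amount: this.contents.get(identifier.path)?.length ?? 0 };
  }

  public async calculateChunkSize(chunk: string): Promise<number> {
    return chunk.length;
  }

  // Like FileSizeReporter, fall back to the advertised content-length for estimates.
  public async estimateSize(metadata: RepresentationMetadata): Promise<number | undefined> {
    return metadata.contentLength;
  }
}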

View File

@ -0,0 +1,61 @@
import { Readable, PassThrough } from 'stream';
import { Validator } from '../../http/auxiliary/Validator';
import type { ValidatorInput } from '../../http/auxiliary/Validator';
import type { Representation } from '../../http/representation/Representation';
import { PayloadHttpError } from '../../util/errors/PayloadHttpError';
import type { Guarded } from '../../util/GuardedStream';
import { guardStream } from '../../util/GuardedStream';
import { pipeSafely } from '../../util/StreamUtil';
import type { QuotaStrategy } from '../quota/QuotaStrategy';
/**
* The QuotaValidator validates data streams by making sure they would not exceed the limits of a QuotaStrategy.
*/
export class QuotaValidator extends Validator {
private readonly strategy: QuotaStrategy;
public constructor(strategy: QuotaStrategy) {
super();
this.strategy = strategy;
}
public async handle({ representation, identifier }: ValidatorInput): Promise<Representation> {
const { data, metadata } = representation;
// 1. Get the available size
const availableSize = await this.strategy.getAvailableSpace(identifier);
// 2. Check if the estimated size is bigger than the available size
const estimatedSize = await this.strategy.estimateSize(metadata);
if (estimatedSize && availableSize.amount < estimatedSize.amount) {
return {
...representation,
data: guardStream(new Readable({
read(this): void {
this.destroy(new PayloadHttpError(
`Quota exceeded: Advertised Content-Length is ${estimatedSize.amount} ${estimatedSize.unit} ` +
`and only ${availableSize.amount} ${availableSize.unit} is available`,
));
},
})),
};
}
// 3. Track if quota is exceeded during writing
const tracking: Guarded<PassThrough> = await this.strategy.createQuotaGuard(identifier);
// 4. Double check quota is not exceeded after write (concurrent writing possible)
const afterWrite = new PassThrough({
flush: async(done): Promise<void> => {
const availableSpace = (await this.strategy.getAvailableSpace(identifier)).amount;
done(availableSpace < 0 ? new PayloadHttpError('Quota exceeded after write completed') : undefined);
},
});
return {
...representation,
data: pipeSafely(pipeSafely(data, tracking), afterWrite),
};
}
}

View File

@ -86,6 +86,10 @@ export const FOAF = createUriAndTermNamespace('http://xmlns.com/foaf/0.1/',
'Agent',
);
export const HH = createUriAndTermNamespace('http://www.w3.org/2011/http-headers#',
'content-length',
);
export const HTTP = createUriAndTermNamespace('http://www.w3.org/2011/http#',
'statusCodeNumber',
);
@ -155,6 +159,7 @@ export const XSD = createUriAndTermNamespace('http://www.w3.org/2001/XMLSchema#'
);
// Alias for commonly used types
export const CONTENT_LENGTH_TERM = HH.terms['content-length'];
export const CONTENT_TYPE = MA.format;
export const CONTENT_TYPE_TERM = MA.terms.format;
export const PREFERRED_PREFIX = VANN.preferredNamespacePrefix;

View File

@ -0,0 +1,23 @@
import type { HttpErrorOptions } from './HttpError';
import { HttpError } from './HttpError';
/**
* An error thrown when data exceeds the pre-configured quota
*/
export class PayloadHttpError extends HttpError {
/**
* Default message is 'Storage quota was exceeded.'.
* @param message - Optional, more specific, message.
* @param options - Optional error options.
*/
public constructor(message?: string, options?: HttpErrorOptions) {
super(413,
'PayloadHttpError',
message ?? 'Storage quota was exceeded.',
options);
}
public static isInstance(error: any): error is PayloadHttpError {
return HttpError.isInstance(error) && error.statusCode === 413;
}
}
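
A short usage sketch; the message and the surrounding control flow are placeholders:

try {
  throw new PayloadHttpError('Quota exceeded by 512 bytes during write');
} catch (error: unknown) {
  if (PayloadHttpError.isInstance(error)) {
    // Any HttpError with status code 413 is treated as a quota/payload error.
    console.log(error.statusCode); // 413
  }
}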

View File

@ -0,0 +1,222 @@
import { promises as fsPromises } from 'fs';
import type { Stats } from 'fs';
import fetch from 'cross-fetch';
import type { Response } from 'cross-fetch';
import { joinFilePath, joinUrl } from '../../src';
import type { App } from '../../src';
import { getPort } from '../util/Util';
import { getDefaultVariables, getTestConfigPath, getTestFolder, instantiateFromConfig, removeFolder } from './Config';
/** Performs a simple PUT request to the given 'path' with a body containing 'length' amount of characters */
async function performSimplePutWithLength(path: string, length: number): Promise<Response> {
return fetch(
path,
{
method: 'PUT',
headers: {
'content-type': 'text/plain',
},
body: 'A'.repeat(length),
},
);
}
/** Registers two test pods on the server matching the 'baseUrl' */
async function registerTestPods(baseUrl: string, pods: string[]): Promise<void> {
for (const pod of pods) {
await fetch(`${baseUrl}idp/register/`, {
method: 'POST',
headers: {
'content-type': 'application/json',
},
body: JSON.stringify({
createWebId: 'on',
webId: '',
register: 'on',
createPod: 'on',
podName: pod,
email: `${pod}@example.ai`,
password: 't',
confirmPassword: 't',
submit: '',
}),
});
}
}
/* We just want a container with the correct metadata, everything else can be removed */
async function clearInitialFiles(rootFilePath: string, pods: string[]): Promise<void> {
for (const pod of pods) {
const fileList = await fsPromises.readdir(joinFilePath(rootFilePath, pod));
for (const file of fileList) {
if (file !== '.meta') {
const path = joinFilePath(rootFilePath, pod, file);
if ((await fsPromises.stat(path)).isDirectory()) {
await fsPromises.rmdir(path, { recursive: true });
} else {
await fsPromises.unlink(path);
}
}
}
}
}
describe('A quota server', (): void => {
// The allowed quota depends on what filesystem/OS you are using.
// For example: an empty folder is reported as
// - 0KB on NTFS (most of the time, mileage may vary)
// - 0-...KB on APFS (depending on its contents and settings)
// - 4096KB on FAT
// This is why we need to determine the size of a folder on the current system.
let folderSizeTest: Stats;
beforeAll(async(): Promise<void> => {
// We want to use an empty folder as on APFS/Mac folder sizes vary a lot
const tempFolder = getTestFolder('quota-temp');
await fsPromises.mkdir(tempFolder);
folderSizeTest = await fsPromises.stat(tempFolder);
await removeFolder(tempFolder);
});
const podName1 = 'arthur';
const podName2 = 'abel';
/** Test the general functionality of the server using pod quota */
describe('with pod quota enabled', (): void => {
const port = getPort('PodQuota');
const baseUrl = `http://localhost:${port}/`;
const pod1 = joinUrl(baseUrl, podName1);
const pod2 = joinUrl(baseUrl, podName2);
const rootFilePath = getTestFolder('quota-pod');
let app: App;
beforeAll(async(): Promise<void> => {
// Calculate the allowed quota depending on file system used
const size = folderSizeTest.size + 4000;
const instances = await instantiateFromConfig(
'urn:solid-server:test:Instances',
getTestConfigPath('quota-pod.json'),
{
...getDefaultVariables(port, baseUrl),
'urn:solid-server:default:variable:rootFilePath': rootFilePath,
'urn:solid-server:default:variable:PodQuota': size,
},
) as Record<string, any>;
({ app } = instances);
await app.start();
// Initialize 2 pods
await registerTestPods(baseUrl, [ podName1, podName2 ]);
await clearInitialFiles(rootFilePath, [ podName1, podName2 ]);
});
afterAll(async(): Promise<void> => {
await app.stop();
await removeFolder(rootFilePath);
});
// Test quota in the first pod
it('should return a 413 when the quota is exceeded during write.', async(): Promise<void> => {
const testFile1 = `${pod1}/test1.txt`;
const testFile2 = `${pod1}/test2.txt`;
const response1 = performSimplePutWithLength(testFile1, 2000);
await expect(response1).resolves.toBeDefined();
expect((await response1).status).toEqual(201);
const response2 = performSimplePutWithLength(testFile2, 2500);
await expect(response2).resolves.toBeDefined();
expect((await response2).status).toEqual(413);
});
// Test if writing in another pod is still possible
it('should allow writing in a pod that is not full yet.', async(): Promise<void> => {
const testFile1 = `${pod2}/test1.txt`;
const response1 = performSimplePutWithLength(testFile1, 2000);
await expect(response1).resolves.toBeDefined();
expect((await response1).status).toEqual(201);
});
// Both pods should not accept this request anymore
it('should block PUT requests to different pods if their quota is exceeded.', async(): Promise<void> => {
const testFile1 = `${pod1}/test2.txt`;
const testFile2 = `${pod2}/test2.txt`;
const response1 = performSimplePutWithLength(testFile1, 2500);
await expect(response1).resolves.toBeDefined();
expect((await response1).status).toEqual(413);
const response2 = performSimplePutWithLength(testFile2, 2500);
await expect(response2).resolves.toBeDefined();
expect((await response2).status).toEqual(413);
});
});
/** Test the general functionality of the server using global quota */
describe('with global quota enabled', (): void => {
const port = getPort('GlobalQuota');
const baseUrl = `http://localhost:${port}/`;
const pod1 = `${baseUrl}${podName1}`;
const pod2 = `${baseUrl}${podName2}`;
const rootFilePath = getTestFolder('quota-global');
let app: App;
beforeAll(async(): Promise<void> => {
// Calculate the allowed quota depending on file system used
const size = (folderSizeTest.size * 3) + 4000;
const instances = await instantiateFromConfig(
'urn:solid-server:test:Instances',
getTestConfigPath('quota-global.json'),
{
...getDefaultVariables(port, baseUrl),
'urn:solid-server:default:variable:rootFilePath': rootFilePath,
'urn:solid-server:default:variable:GlobalQuota': size,
},
) as Record<string, any>;
({ app } = instances);
await app.start();
// Initialize 2 pods
await registerTestPods(baseUrl, [ podName1, podName2 ]);
await clearInitialFiles(rootFilePath, [ podName1, podName2 ]);
});
afterAll(async(): Promise<void> => {
await app.stop();
await removeFolder(rootFilePath);
});
it('should return 413 when global quota is exceeded.', async(): Promise<void> => {
const testFile1 = `${baseUrl}test1.txt`;
const testFile2 = `${baseUrl}test2.txt`;
const response1 = performSimplePutWithLength(testFile1, 2000);
await expect(response1).resolves.toBeDefined();
const awaitedRes1 = await response1;
expect(awaitedRes1.status).toEqual(201);
const response2 = performSimplePutWithLength(testFile2, 2500);
await expect(response2).resolves.toBeDefined();
const awaitedRes2 = await response2;
expect(awaitedRes2.status).toEqual(413);
});
it('should return 413 when trying to write to any pod when global quota is exceeded.', async(): Promise<void> => {
const testFile1 = `${pod1}/test3.txt`;
const testFile2 = `${pod2}/test4.txt`;
const response1 = performSimplePutWithLength(testFile1, 2500);
await expect(response1).resolves.toBeDefined();
const awaitedRes1 = await response1;
expect(awaitedRes1.status).toEqual(413);
const response2 = performSimplePutWithLength(testFile2, 2500);
await expect(response2).resolves.toBeDefined();
const awaitedRes2 = await response2;
expect(awaitedRes2.status).toEqual(413);
});
});
});

View File

@ -0,0 +1,65 @@
{
"@context": "https://linkedsoftwaredependencies.org/bundles/npm/@solid/community-server/^2.0.0/components/context.jsonld",
"import": [
"files-scs:config/app/main/default.json",
"files-scs:config/app/init/initialize-root.json",
"files-scs:config/app/setup/disabled.json",
"files-scs:config/http/handler/default.json",
"files-scs:config/http/middleware/websockets.json",
"files-scs:config/http/server-factory/websockets.json",
"files-scs:config/http/static/default.json",
"files-scs:config/identity/access/public.json",
"files-scs:config/identity/email/default.json",
"files-scs:config/identity/handler/default.json",
"files-scs:config/identity/ownership/token.json",
"files-scs:config/identity/pod/static.json",
"files-scs:config/identity/registration/enabled.json",
"files-scs:config/ldp/authentication/dpop-bearer.json",
"files-scs:config/ldp/authorization/allow-all.json",
"files-scs:config/ldp/handler/default.json",
"files-scs:config/ldp/metadata-parser/default.json",
"files-scs:config/ldp/metadata-writer/default.json",
"files-scs:config/ldp/modes/default.json",
"files-scs:config/storage/backend/global-quota-file.json",
"files-scs:config/storage/key-value/resource-store.json",
"files-scs:config/storage/middleware/default.json",
"files-scs:config/util/auxiliary/acl.json",
"files-scs:config/util/identifiers/suffix.json",
"files-scs:config/util/index/default.json",
"files-scs:config/util/logging/winston.json",
"files-scs:config/util/representation-conversion/default.json",
"files-scs:config/util/resource-locker/memory.json",
"files-scs:config/util/variables/default.json"
],
"@graph": [
{
"comment": "A single-pod server that stores its resources on disk while enforcing quota."
},
{
"comment": "The set quota enforced globally",
"@id": "urn:solid-server:default:variable:GlobalQuota",
"@type": "Variable"
},
{
"@id": "urn:solid-server:default:QuotaStrategy",
"GlobalQuotaStrategy:_limit_amount": {
"@id": "urn:solid-server:default:variable:GlobalQuota"
},
"GlobalQuotaStrategy:_limit_unit": "bytes"
},
{
"@id": "urn:solid-server:default:SizeReporter",
"FileSizeReporter:_ignoreFolders": [ "^/\\.internal$" ]
},
{
"@id": "urn:solid-server:test:Instances",
"@type": "RecordObject",
"record": [
{
"RecordObject:_record_key": "app",
"RecordObject:_record_value": { "@id": "urn:solid-server:default:App" }
}
]
}
]
}

View File

@ -0,0 +1,61 @@
{
"@context": "https://linkedsoftwaredependencies.org/bundles/npm/@solid/community-server/^2.0.0/components/context.jsonld",
"import": [
"files-scs:config/app/main/default.json",
"files-scs:config/app/init/initialize-root.json",
"files-scs:config/app/setup/disabled.json",
"files-scs:config/http/handler/default.json",
"files-scs:config/http/middleware/websockets.json",
"files-scs:config/http/server-factory/websockets.json",
"files-scs:config/http/static/default.json",
"files-scs:config/identity/access/public.json",
"files-scs:config/identity/email/default.json",
"files-scs:config/identity/handler/default.json",
"files-scs:config/identity/ownership/token.json",
"files-scs:config/identity/pod/static.json",
"files-scs:config/identity/registration/enabled.json",
"files-scs:config/ldp/authentication/dpop-bearer.json",
"files-scs:config/ldp/authorization/allow-all.json",
"files-scs:config/ldp/handler/default.json",
"files-scs:config/ldp/metadata-parser/default.json",
"files-scs:config/ldp/metadata-writer/default.json",
"files-scs:config/ldp/modes/default.json",
"files-scs:config/storage/backend/pod-quota-file.json",
"files-scs:config/storage/key-value/resource-store.json",
"files-scs:config/storage/middleware/default.json",
"files-scs:config/util/auxiliary/acl.json",
"files-scs:config/util/identifiers/suffix.json",
"files-scs:config/util/index/default.json",
"files-scs:config/util/logging/winston.json",
"files-scs:config/util/representation-conversion/default.json",
"files-scs:config/util/resource-locker/memory.json",
"files-scs:config/util/variables/default.json"
],
"@graph": [
{
"comment": "A single-pod server that stores its resources on disk while enforcing quota."
},
{
"comment": "The set quota enforced per pod",
"@id": "urn:solid-server:default:variable:PodQuota",
"@type": "Variable"
},
{
"@id": "urn:solid-server:default:QuotaStrategy",
"PodQuotaStrategy:_limit_amount": {
"@id": "urn:solid-server:default:variable:PodQuota"
},
"PodQuotaStrategy:_limit_unit": "bytes"
},
{
"@id": "urn:solid-server:test:Instances",
"@type": "RecordObject",
"record": [
{
"RecordObject:_record_key": "app",
"RecordObject:_record_value": { "@id": "urn:solid-server:default:App" }
}
]
}
]
}


@@ -61,10 +61,10 @@ describe('A ComposedAuxiliaryStrategy', (): void => {
});
it('validates data through the Validator.', async(): Promise<void> => {
- const representation = { data: 'data!' } as any;
+ const representation = { data: 'data!', metadata: { identifier: { value: 'any' }}} as any;
await expect(strategy.validate(representation)).resolves.toBeUndefined();
expect(validator.handleSafe).toHaveBeenCalledTimes(1);
- expect(validator.handleSafe).toHaveBeenLastCalledWith(representation);
+ expect(validator.handleSafe).toHaveBeenLastCalledWith({ representation, identifier: { path: 'any' }});
});
it('defaults isRequiredInRoot to false.', async(): Promise<void> => {


@@ -1,5 +1,6 @@
import { RdfValidator } from '../../../../src/http/auxiliary/RdfValidator';
import { BasicRepresentation } from '../../../../src/http/representation/BasicRepresentation';
+ import type { ResourceIdentifier } from '../../../../src/http/representation/ResourceIdentifier';
import type { RepresentationConverter } from '../../../../src/storage/conversion/RepresentationConverter';
import { readableToString } from '../../../../src/util/StreamUtil';
import { StaticAsyncHandler } from '../../../util/StaticAsyncHandler';
@@ -8,6 +9,7 @@ import 'jest-rdf';
describe('An RdfValidator', (): void => {
let converter: RepresentationConverter;
let validator: RdfValidator;
+ const identifier: ResourceIdentifier = { path: 'any/path' };
beforeEach(async(): Promise<void> => {
converter = new StaticAsyncHandler<any>(true, null);
@@ -20,14 +22,15 @@ describe('An RdfValidator', (): void => {
it('always accepts content-type internal/quads.', async(): Promise<void> => {
const representation = new BasicRepresentation('data', 'internal/quads');
- await expect(validator.handle(representation)).resolves.toBeUndefined();
+ await expect(validator.handle({ representation, identifier })).resolves.toEqual(representation);
});
it('validates data by running it through a converter.', async(): Promise<void> => {
converter.handleSafe = jest.fn().mockResolvedValue(new BasicRepresentation('transformedData', 'wrongType'));
const representation = new BasicRepresentation('data', 'content-type');
const quads = representation.metadata.quads();
- await expect(validator.handle(representation)).resolves.toBeUndefined();
+ // Output is not important for this Validator
+ await expect(validator.handle({ representation, identifier })).resolves.toBeDefined();
// Make sure the data can still be streamed
await expect(readableToString(representation.data)).resolves.toBe('data');
// Make sure the metadata was not changed
@@ -37,7 +40,7 @@ describe('An RdfValidator', (): void => {
it('throws an error when validating invalid data.', async(): Promise<void> => {
converter.handleSafe = jest.fn().mockRejectedValue(new Error('bad data!'));
const representation = new BasicRepresentation('data', 'content-type');
- await expect(validator.handle(representation)).rejects.toThrow('bad data!');
+ await expect(validator.handle({ representation, identifier })).rejects.toThrow('bad data!');
// Make sure the data on the readable has not been reset
expect(representation.data.destroyed).toBe(true);
});

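The two test changes above reflect the new validator contract: instead of a bare representation, validators now receive an input containing both the representation and the identifier of the resource it targets, and they resolve to a representation. A simplified sketch of that shape; the types here are stand-ins for illustration, not the actual CSS classes:

// Simplified stand-ins for the real interfaces, for illustration only.
interface ResourceIdentifier { path: string }
interface Representation { data: unknown; metadata: unknown }

interface ValidatorInput {
  representation: Representation;
  identifier: ResourceIdentifier;
}

// A validator gets the representation plus the identifier it belongs to and
// resolves to a (possibly transformed) representation.
async function validate(input: ValidatorInput): Promise<Representation> {
  // Real validators (such as RdfValidator above) would inspect or convert the data here.
  return input.representation;
}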

@@ -0,0 +1,32 @@
import { ContentLengthParser } from '../../../../../src/http/input/metadata/ContentLengthParser';
import { RepresentationMetadata } from '../../../../../src/http/representation/RepresentationMetadata';
import type { HttpRequest } from '../../../../../src/server/HttpRequest';
describe('A ContentLengthParser', (): void => {
const parser = new ContentLengthParser();
let request: HttpRequest;
let metadata: RepresentationMetadata;
beforeEach(async(): Promise<void> => {
request = { headers: {}} as HttpRequest;
metadata = new RepresentationMetadata();
});
it('does nothing if there is no content-length header.', async(): Promise<void> => {
await expect(parser.handle({ request, metadata })).resolves.toBeUndefined();
expect(metadata.quads()).toHaveLength(0);
});
it('sets the given content-length as metadata.', async(): Promise<void> => {
request.headers['content-length'] = '50';
await expect(parser.handle({ request, metadata })).resolves.toBeUndefined();
expect(metadata.quads()).toHaveLength(1);
expect(metadata.contentLength).toBe(50);
});
it('does not set a content-length when the header is invalid.', async(): Promise<void> => {
request.headers['content-length'] = 'aabbcc50ccbbaa';
await expect(parser.handle({ request, metadata })).resolves.toBeUndefined();
expect(metadata.quads()).toHaveLength(0);
});
});

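A minimal sketch of the behaviour these tests pin down: the parser only stores a content length when the header is a plain numeric value, and ignores anything else. The regex and the helper types below are assumptions, not the actual ContentLengthParser implementation:

// Hypothetical helper types; the real parser works on HttpRequest and RepresentationMetadata.
interface MetadataLike { contentLength?: number }
interface RequestLike { headers: Record<string, string | undefined> }

function parseContentLength(request: RequestLike, metadata: MetadataLike): void {
  const header = request.headers['content-length'];
  // Only accept a fully numeric value; an invalid header sets no metadata at all.
  if (header && /^\d+$/u.test(header)) {
    metadata.contentLength = Number(header);
  }
}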

@@ -60,6 +60,16 @@ describe('A RepresentationMetadata', (): void => {
expect(metadata.contentType).toEqual('text/turtle');
});
+ it('stores the content-length correctly.', async(): Promise<void> => {
+ metadata = new RepresentationMetadata();
+ metadata.contentLength = 50;
+ expect(metadata.contentLength).toEqual(50);
+ metadata = new RepresentationMetadata();
+ metadata.contentLength = undefined;
+ expect(metadata.contentLength).toBeUndefined();
+ });
it('copies an other metadata object.', async(): Promise<void> => {
const other = new RepresentationMetadata({ path: 'otherId' }, { 'test:pred': 'objVal' });
metadata = new RepresentationMetadata(other);


@@ -0,0 +1,37 @@
import type { ResourceIdentifier } from '../../../src/http/representation/ResourceIdentifier';
import { GlobalQuotaStrategy } from '../../../src/storage/quota/GlobalQuotaStrategy';
import { UNIT_BYTES } from '../../../src/storage/size-reporter/Size';
import type { Size } from '../../../src/storage/size-reporter/Size';
import type { SizeReporter } from '../../../src/storage/size-reporter/SizeReporter';
describe('GlobalQuotaStrategy', (): void => {
let strategy: GlobalQuotaStrategy;
let mockSize: Size;
let mockReporter: jest.Mocked<SizeReporter<any>>;
let mockBase: string;
beforeEach((): void => {
mockSize = { amount: 2000, unit: UNIT_BYTES };
mockBase = '';
mockReporter = {
getSize: jest.fn(async(identifier: ResourceIdentifier): Promise<Size> => ({
unit: mockSize.unit,
// This mock will return 1000 as size of the root and 50 for any other resource
amount: identifier.path === mockBase ? 1000 : 50,
})),
getUnit: jest.fn().mockReturnValue(mockSize.unit),
calculateChunkSize: jest.fn(async(chunk: any): Promise<number> => chunk.length),
estimateSize: jest.fn().mockResolvedValue(5),
};
strategy = new GlobalQuotaStrategy(mockSize, mockReporter, mockBase);
});
describe('getAvailableSpace()', (): void => {
it('should return the correct amount of available space left.', async(): Promise<void> => {
const result = strategy.getAvailableSpace({ path: 'any/path' });
await expect(result).resolves.toEqual(
expect.objectContaining({ amount: mockSize.amount - 950 }),
);
});
});
});

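The expected amount in this test (2000 - 950) implies the following arithmetic: the available space is the limit minus the total size of the root, with the current size of the target resource added back so that overwriting it is not counted twice. A sketch of that calculation, with illustrative names rather than the actual GlobalQuotaStrategy API:

// Illustrative sketch: with the mocks above this yields 2000 - (1000 - 50) = 1050.
interface Size { amount: number; unit: string }

async function availableSpace(
  limit: Size,
  getSize: (path: string) => Promise<Size>,
  root: string,
  target: string,
): Promise<Size> {
  const totalUsed = (await getSize(root)).amount;
  // Add back the target's current size so an overwrite is not counted against the quota twice.
  const targetSize = (await getSize(target)).amount;
  return { amount: limit.amount - (totalUsed - targetSize), unit: limit.unit };
}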

@@ -0,0 +1,77 @@
import { RepresentationMetadata } from '../../../src/http/representation/RepresentationMetadata';
import type { ResourceIdentifier } from '../../../src/http/representation/ResourceIdentifier';
import type { DataAccessor } from '../../../src/storage/accessors/DataAccessor';
import { PodQuotaStrategy } from '../../../src/storage/quota/PodQuotaStrategy';
import { UNIT_BYTES } from '../../../src/storage/size-reporter/Size';
import type { Size } from '../../../src/storage/size-reporter/Size';
import type { SizeReporter } from '../../../src/storage/size-reporter/SizeReporter';
import { NotFoundHttpError } from '../../../src/util/errors/NotFoundHttpError';
import type { IdentifierStrategy } from '../../../src/util/identifiers/IdentifierStrategy';
import { SingleRootIdentifierStrategy } from '../../../src/util/identifiers/SingleRootIdentifierStrategy';
import { PIM, RDF } from '../../../src/util/Vocabularies';
import { mockFs } from '../../util/Util';
jest.mock('fs');
describe('PodQuotaStrategy', (): void => {
let strategy: PodQuotaStrategy;
let mockSize: Size;
let mockReporter: jest.Mocked<SizeReporter<any>>;
let identifierStrategy: IdentifierStrategy;
let accessor: jest.Mocked<DataAccessor>;
const base = 'http://localhost:3000/';
const rootFilePath = 'folder';
beforeEach((): void => {
jest.restoreAllMocks();
mockFs(rootFilePath, new Date());
mockSize = { amount: 2000, unit: UNIT_BYTES };
identifierStrategy = new SingleRootIdentifierStrategy(base);
mockReporter = {
getSize: jest.fn().mockResolvedValue({ unit: mockSize.unit, amount: 50 }),
getUnit: jest.fn().mockReturnValue(mockSize.unit),
calculateChunkSize: jest.fn(async(chunk: any): Promise<number> => chunk.length),
estimateSize: jest.fn().mockResolvedValue(5),
};
accessor = {
// Assume that the pod is called "nested"
getMetadata: jest.fn().mockImplementation(
async(identifier: ResourceIdentifier): Promise<RepresentationMetadata> => {
const res = new RepresentationMetadata();
if (identifier.path === `${base}nested/`) {
res.add(RDF.type, PIM.Storage);
}
return res;
},
),
} as any;
strategy = new PodQuotaStrategy(mockSize, mockReporter, identifierStrategy, accessor);
});
describe('getAvailableSpace()', (): void => {
it('should return a Size containing MAX_SAFE_INTEGER when writing outside a pod.', async(): Promise<void> => {
const result = strategy.getAvailableSpace({ path: `${base}file.txt` });
await expect(result).resolves.toEqual(expect.objectContaining({ amount: Number.MAX_SAFE_INTEGER }));
});
it('should ignore the size of the existing resource when writing inside a pod.', async(): Promise<void> => {
const result = strategy.getAvailableSpace({ path: `${base}nested/nested2/file.txt` });
await expect(result).resolves.toEqual(expect.objectContaining({ amount: mockSize.amount }));
expect(mockReporter.getSize).toHaveBeenCalledTimes(2);
});
it('should return a Size containing the available space when writing inside a pod.', async(): Promise<void> => {
accessor.getMetadata.mockImplementationOnce((): any => {
throw new NotFoundHttpError();
});
const result = strategy.getAvailableSpace({ path: `${base}nested/nested2/file.txt` });
await expect(result).resolves.toEqual(expect.objectContaining({ amount: mockSize.amount }));
expect(mockReporter.getSize).toHaveBeenCalledTimes(2);
});
it('should throw when the lookup for pim:Storage metadata errors.', async(): Promise<void> => {
accessor.getMetadata.mockImplementationOnce((): any => {
throw new Error('error');
});
const result = strategy.getAvailableSpace({ path: `${base}nested/nested2/file.txt` });
await expect(result).rejects.toThrow('error');
});
});
});

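These tests imply how the pod root is found: the strategy walks up the identifier hierarchy and checks each ancestor's metadata for rdf:type pim:Storage; if no ancestor is a storage root, the write happens outside a pod and is effectively unlimited. A simplified sketch of that lookup; the function shapes are assumptions, not the real PodQuotaStrategy internals:

// Simplified sketch of the pod-root lookup exercised above.
interface Identifier { path: string }

async function findPodRoot(
  identifier: Identifier,
  parentOf: (id: Identifier) => Identifier | undefined,
  // True when the resource's metadata contains rdf:type pim:Storage;
  // a not-found error while reading metadata counts as false, other errors propagate.
  isStorageRoot: (id: Identifier) => Promise<boolean>,
): Promise<Identifier | undefined> {
  let current: Identifier | undefined = identifier;
  while (current) {
    if (await isStorageRoot(current)) {
      return current;
    }
    current = parentOf(current);
  }
  // No pim:Storage ancestor: the strategy reports Number.MAX_SAFE_INTEGER of available space.
  return undefined;
}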

@@ -0,0 +1,88 @@
import { RepresentationMetadata } from '../../../src/http/representation/RepresentationMetadata';
import { QuotaStrategy } from '../../../src/storage/quota/QuotaStrategy';
import { UNIT_BYTES } from '../../../src/storage/size-reporter/Size';
import type { Size } from '../../../src/storage/size-reporter/Size';
import type { SizeReporter } from '../../../src/storage/size-reporter/SizeReporter';
import { guardedStreamFrom, pipeSafely } from '../../../src/util/StreamUtil';
import { mockFs } from '../../util/Util';
jest.mock('fs');
class QuotaStrategyWrapper extends QuotaStrategy {
public constructor(reporter: SizeReporter<any>, limit: Size) {
super(reporter, limit);
}
public getAvailableSpace = async(): Promise<Size> => ({ unit: UNIT_BYTES, amount: 5 });
protected getTotalSpaceUsed = async(): Promise<Size> => ({ unit: UNIT_BYTES, amount: 5 });
}
describe('A QuotaStrategy', (): void => {
let strategy: QuotaStrategyWrapper;
let mockSize: Size;
let mockReporter: jest.Mocked<SizeReporter<any>>;
const base = 'http://localhost:3000/';
const rootFilePath = 'folder';
beforeEach((): void => {
jest.restoreAllMocks();
mockFs(rootFilePath, new Date());
mockSize = { amount: 2000, unit: UNIT_BYTES };
mockReporter = {
getSize: jest.fn().mockResolvedValue({ unit: mockSize.unit, amount: 50 }),
getUnit: jest.fn().mockReturnValue(mockSize.unit),
calculateChunkSize: jest.fn(async(chunk: any): Promise<number> => chunk.length),
estimateSize: jest.fn().mockResolvedValue(5),
};
strategy = new QuotaStrategyWrapper(mockReporter, mockSize);
});
describe('constructor()', (): void => {
it('should set the passed parameters as properties.', async(): Promise<void> => {
expect(strategy.limit).toEqual(mockSize);
expect(strategy.reporter).toEqual(mockReporter);
});
});
describe('estimateSize()', (): void => {
it('should return a Size object containing the correct unit and amount.', async(): Promise<void> => {
await expect(strategy.estimateSize(new RepresentationMetadata())).resolves.toEqual(
// This '5' comes from the reporter mock a little up in this file
expect.objectContaining({ unit: mockSize.unit, amount: 5 }),
);
});
it('should return undefined when the reporter returns undefined.', async(): Promise<void> => {
mockReporter.estimateSize.mockResolvedValueOnce(undefined);
await expect(strategy.estimateSize(new RepresentationMetadata())).resolves.toBeUndefined();
});
});
describe('createQuotaGuard()', (): void => {
it('should return a passthrough that destroys the stream when quota is exceeded.', async(): Promise<void> => {
strategy.getAvailableSpace = jest.fn().mockReturnValue({ amount: 50, unit: mockSize.unit });
const fiftyChars = 'A'.repeat(50);
const stream = guardedStreamFrom(fiftyChars);
const track = await strategy.createQuotaGuard({ path: `${base}nested/file2.txt` });
const piped = pipeSafely(stream, track);
for (let i = 0; i < 10; i++) {
stream.push(fiftyChars);
}
expect(piped.destroyed).toBe(false);
for (let i = 0; i < 10; i++) {
stream.push(fiftyChars);
}
expect(piped.destroyed).toBe(false);
stream.push(fiftyChars);
const destroy = new Promise<void>((resolve): void => {
piped.on('error', (): void => resolve());
});
await expect(destroy).resolves.toBeUndefined();
});
});
});

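The guard returned by createQuotaGuard is essentially a PassThrough that tallies the size of every chunk and errors once the available space is used up, which is what destroys the piped stream in the test above. A minimal sketch of that idea, assuming the chunk size is simply its length (as the test mocks do); the error message is illustrative:

import { PassThrough } from 'stream';

// Minimal sketch; the real guard asks its SizeReporter for chunk sizes and
// gets the available space from the strategy, not from a plain number.
function createQuotaGuard(availableBytes: number): PassThrough {
  let total = 0;
  return new PassThrough({
    transform(chunk: Buffer | string, _encoding, callback): void {
      total += chunk.length;
      if (total > availableBytes) {
        // Failing the transform makes the piped stream emit 'error', as the test expects.
        callback(new Error('Quota exceeded'));
      } else {
        callback(null, chunk);
      }
    },
  });
}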

@@ -0,0 +1,97 @@
import 'jest-rdf';
import type { Readable } from 'stream';
import { RepresentationMetadata } from '../../../../src/http/representation/RepresentationMetadata';
import { AtomicFileDataAccessor } from '../../../../src/storage/accessors/AtomicFileDataAccessor';
import { ExtensionBasedMapper } from '../../../../src/storage/mapping/ExtensionBasedMapper';
import { APPLICATION_OCTET_STREAM } from '../../../../src/util/ContentTypes';
import type { Guarded } from '../../../../src/util/GuardedStream';
import { guardedStreamFrom } from '../../../../src/util/StreamUtil';
import { CONTENT_TYPE } from '../../../../src/util/Vocabularies';
import { mockFs } from '../../../util/Util';
jest.mock('fs');
describe('AtomicFileDataAccessor', (): void => {
const rootFilePath = 'uploads';
const base = 'http://test.com/';
let accessor: AtomicFileDataAccessor;
let cache: { data: any };
let metadata: RepresentationMetadata;
let data: Guarded<Readable>;
beforeEach(async(): Promise<void> => {
cache = mockFs(rootFilePath, new Date());
accessor = new AtomicFileDataAccessor(
new ExtensionBasedMapper(base, rootFilePath),
rootFilePath,
'./.internal/tempFiles/',
);
// The 'mkdirSync' in AtomicFileDataAccessor's constructor does not seem to create the folder in the
// cache object used for mocking fs.
// This line creates what represents a folder in the cache object
cache.data['.internal'] = { tempFiles: {}};
metadata = new RepresentationMetadata(APPLICATION_OCTET_STREAM);
data = guardedStreamFrom([ 'data' ]);
});
describe('writing a document', (): void => {
it('writes the data to the corresponding file.', async(): Promise<void> => {
await expect(accessor.writeDocument({ path: `${base}resource` }, data, metadata)).resolves.toBeUndefined();
expect(cache.data.resource).toBe('data');
});
it('writes metadata to the corresponding metadata file.', async(): Promise<void> => {
metadata = new RepresentationMetadata({ path: `${base}res.ttl` },
{ [CONTENT_TYPE]: 'text/turtle', likes: 'apples' });
await expect(accessor.writeDocument({ path: `${base}res.ttl` }, data, metadata)).resolves.toBeUndefined();
expect(cache.data['res.ttl']).toBe('data');
expect(cache.data['res.ttl.meta']).toMatch(`<${base}res.ttl> <likes> "apples".`);
});
it('should delete temp file when done writing.', async(): Promise<void> => {
await expect(accessor.writeDocument({ path: `${base}resource` }, data, metadata)).resolves.toBeUndefined();
expect(Object.keys(cache.data['.internal'].tempFiles)).toHaveLength(0);
expect(cache.data.resource).toBe('data');
});
it('should throw an error when writing the data goes wrong.', async(): Promise<void> => {
data.read = jest.fn((): any => {
data.emit('error', new Error('error'));
return null;
});
jest.requireMock('fs').promises.stat = jest.fn((): any => ({
isFile: (): boolean => false,
}));
await expect(accessor.writeDocument({ path: `${base}res.ttl` }, data, metadata)).rejects.toThrow('error');
});
it('should throw when renaming / moving the file goes wrong.', async(): Promise<void> => {
jest.requireMock('fs').promises.rename = jest.fn((): any => {
throw new Error('error');
});
jest.requireMock('fs').promises.stat = jest.fn((): any => ({
isFile: (): boolean => true,
}));
await expect(accessor.writeDocument({ path: `${base}res.ttl` }, data, metadata)).rejects.toThrow('error');
});
it('should (on error) not unlink the temp file if it does not exist.', async(): Promise<void> => {
jest.requireMock('fs').promises.rename = jest.fn((): any => {
throw new Error('error');
});
jest.requireMock('fs').promises.stat = jest.fn((): any => ({
isFile: (): boolean => false,
}));
await expect(accessor.writeDocument({ path: `${base}res.ttl` }, data, metadata)).rejects.toThrow('error');
});
it('should throw when renaming / moving the file goes wrong and the temp file does not exist.',
async(): Promise<void> => {
jest.requireMock('fs').promises.rename = jest.fn((): any => {
throw new Error('error');
});
jest.requireMock('fs').promises.stat = jest.fn();
await expect(accessor.writeDocument({ path: `${base}res.ttl` }, data, metadata)).rejects.toThrow('error');
});
});
});

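These tests revolve around the write-then-rename pattern: data is first written to a temp file under ./.internal/tempFiles/, then atomically moved into place, and on failure the temp file is removed only if it still exists as a file. A rough sketch of that flow, with an illustrative temp-file name rather than the accessor's actual naming scheme:

import { createWriteStream, promises as fsPromises } from 'fs';
import { join } from 'path';
import type { Readable } from 'stream';
import { pipeline } from 'stream/promises';

// Rough sketch of the atomic write these tests cover.
async function writeAtomically(data: Readable, targetPath: string, tempDir: string): Promise<void> {
  const tempPath = join(tempDir, `upload-${Date.now()}`);
  try {
    await pipeline(data, createWriteStream(tempPath));
    // rename() is atomic on the same file system, so readers never observe partial data.
    await fsPromises.rename(tempPath, targetPath);
  } catch (error: unknown) {
    // Clean up, but only unlink the temp file if it actually exists as a file.
    const stats = await fsPromises.stat(tempPath).catch((): undefined => undefined);
    if (stats?.isFile()) {
      await fsPromises.unlink(tempPath);
    }
    throw error;
  }
}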

@@ -0,0 +1,80 @@
import { BasicRepresentation } from '../../../../src/http/representation/BasicRepresentation';
import { RepresentationMetadata } from '../../../../src/http/representation/RepresentationMetadata';
import type { DataAccessor } from '../../../../src/storage/accessors/DataAccessor';
import { PassthroughDataAccessor } from '../../../../src/storage/accessors/PassthroughDataAccessor';
import { guardedStreamFrom } from '../../../../src/util/StreamUtil';
describe('PassthroughDataAccessor', (): void => {
let passthrough: PassthroughDataAccessor;
let childAccessor: jest.Mocked<DataAccessor>;
const mockIdentifier = { path: 'http://localhost/test.txt' };
const mockMetadata = new RepresentationMetadata();
const mockData = guardedStreamFrom('test string');
const mockRepresentation = new BasicRepresentation(mockData, mockMetadata);
beforeEach(async(): Promise<void> => {
jest.clearAllMocks();
childAccessor = {
canHandle: jest.fn(),
writeDocument: jest.fn(),
getData: jest.fn(),
getChildren: jest.fn(),
writeContainer: jest.fn(),
deleteResource: jest.fn(),
getMetadata: jest.fn(),
};
childAccessor.getChildren = jest.fn();
passthrough = new PassthroughDataAccessor(childAccessor);
});
describe('writeDocument()', (): void => {
it('should call the accessors writeDocument() function.', async(): Promise<void> => {
await passthrough.writeDocument(mockIdentifier, mockData, mockMetadata);
expect(childAccessor.writeDocument).toHaveBeenCalledTimes(1);
expect(childAccessor.writeDocument).toHaveBeenCalledWith(mockIdentifier, mockData, mockMetadata);
});
});
describe('canHandle()', (): void => {
it('should call the accessors canHandle() function.', async(): Promise<void> => {
await passthrough.canHandle(mockRepresentation);
expect(childAccessor.canHandle).toHaveBeenCalledTimes(1);
expect(childAccessor.canHandle).toHaveBeenCalledWith(mockRepresentation);
});
});
describe('getData()', (): void => {
it('should call the accessors getData() function.', async(): Promise<void> => {
await passthrough.getData(mockIdentifier);
expect(childAccessor.getData).toHaveBeenCalledTimes(1);
expect(childAccessor.getData).toHaveBeenCalledWith(mockIdentifier);
});
});
describe('getMetadata()', (): void => {
it('should call the accessors getMetadata() function.', async(): Promise<void> => {
await passthrough.getMetadata(mockIdentifier);
expect(childAccessor.getMetadata).toHaveBeenCalledTimes(1);
expect(childAccessor.getMetadata).toHaveBeenCalledWith(mockIdentifier);
});
});
describe('getChildren()', (): void => {
it('should call the accessors getChildren() function.', async(): Promise<void> => {
passthrough.getChildren(mockIdentifier);
expect(childAccessor.getChildren).toHaveBeenCalledTimes(1);
expect(childAccessor.getChildren).toHaveBeenCalledWith(mockIdentifier);
});
});
describe('deleteResource()', (): void => {
it('should call the accessors deleteResource() function.', async(): Promise<void> => {
await passthrough.deleteResource(mockIdentifier);
expect(childAccessor.deleteResource).toHaveBeenCalledTimes(1);
expect(childAccessor.deleteResource).toHaveBeenCalledWith(mockIdentifier);
});
});
describe('writeContainer()', (): void => {
it('should call the accessors writeContainer() function.', async(): Promise<void> => {
await passthrough.writeContainer(mockIdentifier, mockMetadata);
expect(childAccessor.writeContainer).toHaveBeenCalledTimes(1);
expect(childAccessor.writeContainer).toHaveBeenCalledWith(mockIdentifier, mockMetadata);
});
});
});

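As the tests show, the PassthroughDataAccessor is a plain decorator: every call is forwarded unchanged to the wrapped accessor, so subclasses only override the calls they care about. A tiny sketch of the pattern with a simplified accessor interface (not the real DataAccessor):

// Simplified accessor shape, for illustration of the decorator pattern only.
interface MinimalAccessor {
  getData: (id: { path: string }) => Promise<unknown>;
  deleteResource: (id: { path: string }) => Promise<void>;
}

class Passthrough implements MinimalAccessor {
  public constructor(protected readonly source: MinimalAccessor) {}

  public async getData(id: { path: string }): Promise<unknown> {
    return this.source.getData(id);
  }

  public async deleteResource(id: { path: string }): Promise<void> {
    return this.source.deleteResource(id);
  }
}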

@@ -0,0 +1,54 @@
import type { Validator, ValidatorInput } from '../../../../src/http/auxiliary/Validator';
import { BasicRepresentation } from '../../../../src/http/representation/BasicRepresentation';
import type { Representation } from '../../../../src/http/representation/Representation';
import { RepresentationMetadata } from '../../../../src/http/representation/RepresentationMetadata';
import type { DataAccessor } from '../../../../src/storage/accessors/DataAccessor';
import { ValidatingDataAccessor } from '../../../../src/storage/accessors/ValidatingDataAccessor';
import { guardedStreamFrom } from '../../../../src/util/StreamUtil';
describe('ValidatingDataAccessor', (): void => {
let validatingAccessor: ValidatingDataAccessor;
let childAccessor: jest.Mocked<DataAccessor>;
let validator: jest.Mocked<Validator>;
const mockIdentifier = { path: 'http://localhost/test.txt' };
const mockMetadata = new RepresentationMetadata();
const mockData = guardedStreamFrom('test string');
const mockRepresentation = new BasicRepresentation(mockData, mockMetadata);
beforeEach(async(): Promise<void> => {
jest.clearAllMocks();
childAccessor = {
writeDocument: jest.fn(),
writeContainer: jest.fn(),
} as any;
childAccessor.getChildren = jest.fn();
validator = {
handleSafe: jest.fn(async(input: ValidatorInput): Promise<Representation> => input.representation),
} as any;
validatingAccessor = new ValidatingDataAccessor(childAccessor, validator);
});
describe('writeDocument()', (): void => {
it('should call the validator\'s handleSafe() function.', async(): Promise<void> => {
await validatingAccessor.writeDocument(mockIdentifier, mockData, mockMetadata);
expect(validator.handleSafe).toHaveBeenCalledTimes(1);
expect(validator.handleSafe).toHaveBeenCalledWith({
representation: mockRepresentation,
identifier: mockIdentifier,
});
});
it('should call the accessors writeDocument() function.', async(): Promise<void> => {
await validatingAccessor.writeDocument(mockIdentifier, mockData, mockMetadata);
expect(childAccessor.writeDocument).toHaveBeenCalledTimes(1);
expect(childAccessor.writeDocument).toHaveBeenCalledWith(mockIdentifier, mockData, mockMetadata);
});
});
describe('writeContainer()', (): void => {
it('should call the accessors writeContainer() function.', async(): Promise<void> => {
await validatingAccessor.writeContainer(mockIdentifier, mockMetadata);
expect(childAccessor.writeContainer).toHaveBeenCalledTimes(1);
expect(childAccessor.writeContainer).toHaveBeenCalledWith(mockIdentifier, mockMetadata);
});
});
});

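The order of operations checked here is: wrap the incoming data and metadata in a representation, pass it together with the identifier to the validator, and only then forward the write to the wrapped accessor. A simplified sketch with stand-in types rather than the CSS interfaces:

// Stand-in types; the real class uses the CSS DataAccessor and Validator interfaces.
interface Identifier { path: string }
interface Representation { data: unknown; metadata: unknown }

async function writeValidated(
  validator: { handleSafe: (input: { representation: Representation; identifier: Identifier }) => Promise<Representation> },
  accessor: { writeDocument: (id: Identifier, data: unknown, metadata: unknown) => Promise<void> },
  identifier: Identifier,
  data: unknown,
  metadata: unknown,
): Promise<void> {
  // Validation sees the full representation plus the identifier it targets.
  const representation = { data, metadata };
  const validated = await validator.handleSafe({ representation, identifier });
  // The write is only forwarded once validation succeeded.
  await accessor.writeDocument(identifier, validated.data, metadata);
}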

@@ -0,0 +1,132 @@
import { promises as fsPromises } from 'fs';
import { RepresentationMetadata } from '../../../../src/http/representation/RepresentationMetadata';
import type { ResourceIdentifier } from '../../../../src/http/representation/ResourceIdentifier';
import type { FileIdentifierMapper, ResourceLink } from '../../../../src/storage/mapping/FileIdentifierMapper';
import { FileSizeReporter } from '../../../../src/storage/size-reporter/FileSizeReporter';
import { UNIT_BYTES } from '../../../../src/storage/size-reporter/Size';
import { joinFilePath } from '../../../../src/util/PathUtil';
import { mockFs } from '../../../util/Util';
jest.mock('fs');
describe('A FileSizeReporter', (): void => {
// Folder size is fixed to 4 in the mock
const folderSize = 4;
const mapper: jest.Mocked<FileIdentifierMapper> = {
mapFilePathToUrl: jest.fn(),
mapUrlToFilePath: jest.fn().mockImplementation((id: ResourceIdentifier): ResourceLink => ({
filePath: id.path,
identifier: id,
isMetadata: false,
})),
};
const fileRoot = joinFilePath(process.cwd(), '/test-folder/');
const fileSizeReporter = new FileSizeReporter(
mapper,
fileRoot,
[ '^/\\.internal$' ],
);
beforeEach(async(): Promise<void> => {
mockFs(fileRoot);
});
it('should work without the ignoreFolders constructor parameter.', async(): Promise<void> => {
const tempFileSizeReporter = new FileSizeReporter(
mapper,
fileRoot,
);
const testFile = joinFilePath(fileRoot, '/test.txt');
await fsPromises.writeFile(testFile, 'A'.repeat(20));
const result = tempFileSizeReporter.getSize({ path: testFile });
await expect(result).resolves.toBeDefined();
expect((await result).amount).toBe(20);
});
it('should report the right file size.', async(): Promise<void> => {
const testFile = joinFilePath(fileRoot, '/test.txt');
await fsPromises.writeFile(testFile, 'A'.repeat(20));
const result = fileSizeReporter.getSize({ path: testFile });
await expect(result).resolves.toBeDefined();
expect((await result).amount).toBe(20);
});
it('should work recursively.', async(): Promise<void> => {
const containerFile = joinFilePath(fileRoot, '/test-folder-1/');
await fsPromises.mkdir(containerFile, { recursive: true });
const testFile = joinFilePath(containerFile, '/test.txt');
await fsPromises.writeFile(testFile, 'A'.repeat(20));
const fileSize = fileSizeReporter.getSize({ path: testFile });
const containerSize = fileSizeReporter.getSize({ path: containerFile });
await expect(fileSize).resolves.toEqual(expect.objectContaining({ amount: 20 }));
await expect(containerSize).resolves.toEqual(expect.objectContaining({ amount: 20 + folderSize }));
});
it('should not count files located in an ignored folder.', async(): Promise<void> => {
const containerFile = joinFilePath(fileRoot, '/test-folder-2/');
await fsPromises.mkdir(containerFile, { recursive: true });
const testFile = joinFilePath(containerFile, '/test.txt');
await fsPromises.writeFile(testFile, 'A'.repeat(20));
const internalContainerFile = joinFilePath(fileRoot, '/.internal/');
await fsPromises.mkdir(internalContainerFile, { recursive: true });
const internalTestFile = joinFilePath(internalContainerFile, '/test.txt');
await fsPromises.writeFile(internalTestFile, 'A'.repeat(30));
const fileSize = fileSizeReporter.getSize({ path: testFile });
const containerSize = fileSizeReporter.getSize({ path: containerFile });
const rootSize = fileSizeReporter.getSize({ path: fileRoot });
const expectedFileSize = 20;
const expectedContainerSize = 20 + folderSize;
const expectedRootSize = expectedContainerSize + folderSize;
await expect(fileSize).resolves.toEqual(expect.objectContaining({ amount: expectedFileSize }));
await expect(containerSize).resolves.toEqual(expect.objectContaining({ amount: expectedContainerSize }));
await expect(rootSize).resolves.toEqual(expect.objectContaining({ amount: expectedRootSize }));
});
it('should have the unit in its return value.', async(): Promise<void> => {
const testFile = joinFilePath(fileRoot, '/test2.txt');
await fsPromises.writeFile(testFile, 'A'.repeat(20));
const result = fileSizeReporter.getSize({ path: testFile });
await expect(result).resolves.toBeDefined();
expect((await result).unit).toBe(UNIT_BYTES);
});
it('getUnit() should return UNIT_BYTES.', (): void => {
expect(fileSizeReporter.getUnit()).toBe(UNIT_BYTES);
});
it('should return 0 when the size of a non-existent file is requested.', async(): Promise<void> => {
const result = fileSizeReporter.getSize({ path: joinFilePath(fileRoot, '/test.txt') });
await expect(result).resolves.toEqual(expect.objectContaining({ amount: 0 }));
});
it('should calculate the chunk size correctly.', async(): Promise<void> => {
const testString = 'testesttesttesttest==testtest';
const result = fileSizeReporter.calculateChunkSize(testString);
await expect(result).resolves.toEqual(testString.length);
});
describe('estimateSize()', (): void => {
it('should return the content-length.', async(): Promise<void> => {
const metadata = new RepresentationMetadata();
metadata.contentLength = 100;
await expect(fileSizeReporter.estimateSize(metadata)).resolves.toEqual(100);
});
it(
'should return undefined if no content-length is present in the metadata.',
async(): Promise<void> => {
const metadata = new RepresentationMetadata();
await expect(fileSizeReporter.estimateSize(metadata)).resolves.toBeUndefined();
},
);
});
});

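The size numbers in these tests follow from a simple recursion: a missing path counts as 0, a file counts as its own size, and a folder counts as its own size (fixed to 4 in the mock) plus the sizes of its children, except for children whose root-relative path matches one of the ignored patterns. A sketch of that recursion, not the actual FileSizeReporter code:

import { promises as fsPromises } from 'fs';
import { join, relative, sep } from 'path';

// Sketch of the recursion the tests above exercise (file size, container size,
// ignored folders, missing files).
async function getDiskSize(path: string, root: string, ignore: RegExp[]): Promise<number> {
  const stats = await fsPromises.lstat(path).catch((): undefined => undefined);
  if (!stats) {
    return 0; // Non-existent resources count as 0 bytes.
  }
  if (!stats.isDirectory()) {
    return stats.size;
  }
  let total = stats.size; // The folder entry itself also takes up space.
  for (const child of await fsPromises.readdir(path)) {
    const childPath = join(path, child);
    // Skip anything whose root-relative path matches an ignored pattern, e.g. ^/\.internal$
    const rel = `/${relative(root, childPath).split(sep).join('/')}`;
    if (ignore.some((re): boolean => re.test(rel))) {
      continue;
    }
    total += await getDiskSize(childPath, root, ignore);
  }
  return total;
}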

@@ -0,0 +1,120 @@
import type { Readable } from 'stream';
import { PassThrough } from 'stream';
import type { ValidatorInput } from '../../../../src/http/auxiliary/Validator';
import { BasicRepresentation } from '../../../../src/http/representation/BasicRepresentation';
import { RepresentationMetadata } from '../../../../src/http/representation/RepresentationMetadata';
import type { ResourceIdentifier } from '../../../../src/http/representation/ResourceIdentifier';
import type { QuotaStrategy } from '../../../../src/storage/quota/QuotaStrategy';
import { UNIT_BYTES } from '../../../../src/storage/size-reporter/Size';
import type { SizeReporter } from '../../../../src/storage/size-reporter/SizeReporter';
import { QuotaValidator } from '../../../../src/storage/validators/QuotaValidator';
import { guardStream } from '../../../../src/util/GuardedStream';
import type { Guarded } from '../../../../src/util/GuardedStream';
import { guardedStreamFrom, readableToString } from '../../../../src/util/StreamUtil';
describe('QuotaValidator', (): void => {
let mockedStrategy: jest.Mocked<QuotaStrategy>;
let validator: QuotaValidator;
let identifier: ResourceIdentifier;
let mockMetadata: RepresentationMetadata;
let mockData: Guarded<Readable>;
let mockInput: ValidatorInput;
let mockReporter: jest.Mocked<SizeReporter<any>>;
beforeEach((): void => {
jest.clearAllMocks();
identifier = { path: 'http://localhost/' };
mockMetadata = new RepresentationMetadata();
mockData = guardedStreamFrom([ 'test string' ]);
mockInput = {
representation: new BasicRepresentation(mockData, mockMetadata),
identifier,
};
mockReporter = {
getSize: jest.fn(),
getUnit: jest.fn(),
calculateChunkSize: jest.fn(),
estimateSize: jest.fn().mockResolvedValue(8),
};
mockedStrategy = {
reporter: mockReporter,
limit: { unit: UNIT_BYTES, amount: 8 },
getAvailableSpace: jest.fn().mockResolvedValue({ unit: UNIT_BYTES, amount: 10 }),
estimateSize: jest.fn().mockResolvedValue({ unit: UNIT_BYTES, amount: 8 }),
createQuotaGuard: jest.fn().mockResolvedValue(guardStream(new PassThrough())),
} as any;
validator = new QuotaValidator(mockedStrategy);
});
describe('handle()', (): void => {
// Step 2
it('should destroy the stream when estimated size is larger than the available size.', async(): Promise<void> => {
mockedStrategy.estimateSize.mockResolvedValueOnce({ unit: UNIT_BYTES, amount: 11 });
const result = validator.handle(mockInput);
await expect(result).resolves.toBeDefined();
const awaitedResult = await result;
const prom = new Promise<void>((resolve, reject): void => {
awaitedResult.data.on('error', (): void => resolve());
awaitedResult.data.on('end', (): void => reject(new Error('reject')));
});
// Consume the stream
await expect(readableToString(awaitedResult.data))
.rejects.toThrow('Quota exceeded: Advertised Content-Length is');
await expect(prom).resolves.toBeUndefined();
});
// Step 3
it('should destroy the stream when quota is exceeded during write.', async(): Promise<void> => {
mockedStrategy.createQuotaGuard.mockResolvedValueOnce(guardStream(new PassThrough({
async transform(this): Promise<void> {
this.destroy(new Error('error'));
},
})));
const result = validator.handle(mockInput);
await expect(result).resolves.toBeDefined();
const awaitedResult = await result;
const prom = new Promise<void>((resolve, reject): void => {
awaitedResult.data.on('error', (): void => resolve());
awaitedResult.data.on('end', (): void => reject(new Error('reject')));
});
// Consume the stream
await expect(readableToString(awaitedResult.data)).rejects.toThrow('error');
expect(mockedStrategy.createQuotaGuard).toHaveBeenCalledTimes(1);
await expect(prom).resolves.toBeUndefined();
});
// Step 4
it('should throw when the quota was exceeded after the stream finished.', async(): Promise<void> => {
const result = validator.handle(mockInput);
// Mocking this after handle() but before consuming the stream means it only
// affects this function in the flush part of the code.
mockedStrategy.getAvailableSpace.mockResolvedValueOnce({ unit: UNIT_BYTES, amount: -100 });
await expect(result).resolves.toBeDefined();
const awaitedResult = await result;
const prom = new Promise<void>((resolve, reject): void => {
awaitedResult.data.on('error', (): void => resolve());
awaitedResult.data.on('end', (): void => reject(new Error('reject')));
});
// Consume the stream
await expect(readableToString(awaitedResult.data)).rejects.toThrow('Quota exceeded after write completed');
await expect(prom).resolves.toBeUndefined();
});
it('should return a stream that is consumable without error if quota isn\'t exceeded.', async(): Promise<void> => {
const result = validator.handle(mockInput);
await expect(result).resolves.toBeDefined();
const awaitedResult = await result;
await expect(readableToString(awaitedResult.data)).resolves.toBe('test string');
});
});
});

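The "Step 2/3/4" comments correspond to three separate checks: an up-front comparison of the estimated size (for example the Content-Length) against the available space, a quota guard that tracks the bytes actually written, and a final flush-time check in case concurrent writes used up the space in the meantime. A simplified sketch of those checks; the strategy shape, error messages and the plain .pipe() are illustrative (the real validator surfaces the first check as a stream error and uses a safe pipe that forwards errors):

import type { Readable } from 'stream';
import { PassThrough } from 'stream';

// Simplified strategy shape for illustration.
interface Size { amount: number; unit: string }
interface StrategyLike {
  estimateSize: (metadata: unknown) => Promise<Size | undefined>;
  getAvailableSpace: (id: { path: string }) => Promise<Size>;
  createQuotaGuard: (id: { path: string }) => Promise<PassThrough>;
}

async function quotaGuardedStream(
  strategy: StrategyLike,
  identifier: { path: string },
  data: Readable,
  metadata: unknown,
): Promise<Readable> {
  // (1) Up-front check against the advertised size.
  const estimate = await strategy.estimateSize(metadata);
  const available = await strategy.getAvailableSpace(identifier);
  if (estimate && estimate.amount > available.amount) {
    throw new Error('Quota exceeded: Advertised Content-Length is larger than the available space');
  }
  // (2) Count the actual bytes while they are written; the guard errors when quota is hit.
  const guard = await strategy.createQuotaGuard(identifier);
  // (3) Check once more when the stream flushes, to catch space consumed in the meantime.
  const flushCheck = new PassThrough({
    flush(callback): void {
      strategy.getAvailableSpace(identifier)
        .then((remaining): void => {
          callback(remaining.amount < 0 ? new Error('Quota exceeded after write completed') : null);
        })
        .catch(callback);
    },
  });
  return data.pipe(guard).pipe(flushCheck);
}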

@@ -7,6 +7,7 @@ import { InternalServerError } from '../../../../src/util/errors/InternalServerE
import { MethodNotAllowedHttpError } from '../../../../src/util/errors/MethodNotAllowedHttpError';
import { NotFoundHttpError } from '../../../../src/util/errors/NotFoundHttpError';
import { NotImplementedHttpError } from '../../../../src/util/errors/NotImplementedHttpError';
+ import { PayloadHttpError } from '../../../../src/util/errors/PayloadHttpError';
import { PreconditionFailedHttpError } from '../../../../src/util/errors/PreconditionFailedHttpError';
import { UnauthorizedHttpError } from '../../../../src/util/errors/UnauthorizedHttpError';
import { UnsupportedMediaTypeHttpError } from '../../../../src/util/errors/UnsupportedMediaTypeHttpError';
@@ -27,6 +28,7 @@ describe('HttpError', (): void => {
[ 'MethodNotAllowedHttpError', 405, MethodNotAllowedHttpError ],
[ 'ConflictHttpError', 409, ConflictHttpError ],
[ 'PreconditionFailedHttpError', 412, PreconditionFailedHttpError ],
+ [ 'PayloadHttpError', 413, PayloadHttpError ],
[ 'UnsupportedMediaTypeHttpError', 415, UnsupportedMediaTypeHttpError ],
[ 'InternalServerError', 500, InternalServerError ],
[ 'NotImplementedHttpError', 501, NotImplementedHttpError ],


@@ -19,6 +19,8 @@ const portNames = [
'SparqlStorage',
'Subdomains',
'WebSocketsProtocol',
+ 'PodQuota',
+ 'GlobalQuota',
// Unit
'BaseHttpServerFactory',
] as const;
@@ -122,7 +124,7 @@ export function mockFs(rootFilepath?: string, time?: Date): { data: any } {
isFile: (): boolean => typeof folder[name] === 'string',
isDirectory: (): boolean => typeof folder[name] === 'object',
isSymbolicLink: (): boolean => typeof folder[name] === 'symbol',
- size: typeof folder[name] === 'string' ? folder[name].length : 0,
+ size: typeof folder[name] === 'string' ? folder[name].length : 4,
mtime: time,
} as Stats;
},
@@ -199,6 +201,21 @@
const { folder, name } = getFolder(path);
folder[name] = data;
},
+ async rename(path: string, destination: string): Promise<void> {
+ const { folder, name } = getFolder(path);
+ if (!folder[name]) {
+ throwSystemError('ENOENT');
+ }
+ if (!(await this.lstat(path)).isFile()) {
+ throwSystemError('EISDIR');
+ }
+ const { folder: folderDest, name: nameDest } = getFolder(destination);
+ folderDest[nameDest] = folder[name];
+ // eslint-disable-next-line @typescript-eslint/no-dynamic-delete
+ delete folder[name];
+ },
},
};
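
The added rename mock mirrors the real fs errors closely enough for the accessor tests above: a missing source rejects with an ENOENT-style error and a directory source with an EISDIR-style one. A small illustration of driving it from a test, assuming the usual jest.mock('fs') plus mockFs setup used throughout these files; the paths are hypothetical:

import { promises as fsPromises } from 'fs';

// Assumes jest.mock('fs') and mockFs imported from test/util/Util, as in the tests above.
it('moves a file within the mocked file system.', async(): Promise<void> => {
  const cache = mockFs('folder');
  await fsPromises.writeFile('folder/a.txt', 'data');
  await fsPromises.rename('folder/a.txt', 'folder/b.txt');
  expect(cache.data['b.txt']).toBe('data');
  expect(cache.data['a.txt']).toBeUndefined();
  // A second rename of the now-missing source rejects like the real fs would.
  await expect(fsPromises.rename('folder/a.txt', 'folder/c.txt')).rejects.toThrow();
});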