Mirror of https://github.com/CommunitySolidServer/CommunitySolidServer.git
Synced 2024-10-03 14:55:10 +00:00
feat: Add support for quota limits
* feat: implemented SizeReporter and FileSizeReporter
* test: FileSizeReporter tests
* feat: added QuotedDataAccessor
* test: added extra test to check recursiveness of FileSizeReporter
* feat: added QuotaStrategy interface
* feat: further progress in different files
* feat: wrote doc, tests and improved code
* feat: fixed bugs and code is now runnable and buildable
* feat: finished implementation
* fix: revert accidental changes
* fix: FileSizeReporter did not count container size
* fix: bug calculating container sizes fixed
* test: FileSizeReporter tests
* test: QuotaDataValidator tests
* test: QuotaError tests
* fix: removed console.log
* doc: added doc to several files
* doc: changed doc for QuotaStrategy to new implementation
* fix: improved content length regex
* feat: improved GlobalQuotaStrategy code
* fix: made FileSizeReporter readonly
* feat: added comments to quota-file.json
* fix: changed default tempFilePath variable
* test: included new tempFilePath variable in testing
* chore: created separate command for start:file:quota to pass tests
* feat: removed all sync fs calls from FileSizeReporter
* feat: minor changes in multiple files
* fix: changed function signatures to be in line with others
* feat: optimized quota data validation
* feat: improved FileSizeReporter code
* fix: corrected calculation of container sizes and fixed erroring edge case
* feat: save content-length as number in metadata
* feat: added comments and changed GlobalQuotaStrategy constructor
* feat: changed file names and added small comment
* test: AtomicFileDataAccessor tests
* test: completed FileSizeReporter tests
* fix: content-length is now saved correctly in RepresentationMetadata
* feat: adapted content length metadata + tests
* fix: removed tempFilePath variable
* fix: reverted .gitignore
* fix: forgot to remove tempFilePath variable from componentsjs config
* test: GlobalQuotaStrategy tests
* feat: replaced DataValidator with Validator
* feat: reworked DataValidator
* feat: added calculateChunkSize() to SizeReporter
* test: updated FileSizeReporter tests
* fix: tempFile location now relative to rootFilePath
* test: QuotaDataValidator tests
* fix: corrected FileSizeReporter tests
* fix: adapted FileSizeReporter tests
* fix: FileSizeReporter bug on Windows
* fix: regex linting error
* feat: changed Validator class
* feat: added PodQuotaStrategy to enable quota on a per-pod basis
* chore: bump context versions
* fix: capitalized comments in JSON file
* chore: renamed ValidatorArgs to ValidatorInput
* chore: order all exports
* fix: made TODO comment clearer
* chore: added separate config files for global and pod-based quota + fixed comments
* chore: made minor changes to comments
* feat: added PassthroughDataAccessor
* feat: added PassthroughDataAccessor + tests
* fix: added invalid header check to ContentLengthParser
* chore: improved mocks
* chore: move quota limit higher up in config
* fix: atomicity issue in AtomicFileDataAccessor
* chore: moved .internal folder to config from FileSizeReporter
* fix: improved algorithm to ignore folders while calculating file size in FileSizeReporter
* fix: changes to support containers in the future
* fix: added error handling to prevent reading of nonexistent files
* feat: added generic type to SizeReporter to calculate chunk sizes
* test: use mocked DataAccessor
* chore: added some comments to tests and made minor improvements
* fix: fs mock rename
* chore: QuotaStrategy.estimateSize refactor
* chore: move trackAvailableSpace to abstract class QuotaStrategy
* fix: improved test case
* test: quota integration tests
* chore: edited some comments
* chore: change lstat to stat
* feat: moved estimateSize to SizeReporter to be consistent with calculateChunkSize
* test: finish up tests to reach coverage
* fix: basic config
* fix: minor changes to test CI run
* fix: small fix for Windows
* fix: improved writing to file
* chore: linting errors
* chore: rename trackAvailableSpace
* test: improved integration tests
* test: logging info for test debugging
* test: extra logging for debugging
* test: logging for debugging
* test: logging for debugging
* test: logging for debugging
* test: improved Quota integration test setup
* test: improve quota tests for CI run
* test: debugging Quota test
* test: uncommented global quota test
* test: changed global quota parameters
* test: logging for debugging
* test: logging cleanup
* chore: minor changes, mostly typo fixes
* chore: remove console.log
* fix: getting inconsistent results
* chore: try fix index.ts CI error
* chore: try fix CI error
* chore: try fix CI error
* chore: revert last commits
* chore: fix inconsistent files with origin
* test: minor test improvements
* chore: minor refactors and improvements
* fix: added extra try/catch for breaking bug
* chore: improve config
* chore: minor code improvements
* test: use mockFs
* feat: add extra check in PodQuotaStrategy
* chore: replace handle by handleSafe in ValidatingDataAccessor
* chore: typo
* test: improved Quota integration tests
* test: made comment in test more correct
* fix: rm -> rmdir for backwards compatibility
* fix: fsPromises issue
* chore: leave out irrelevant config
* chore: removed start script from package.json
* fix: small fixes

Co-authored-by: Joachim Van Herwegen <joachimvh@gmail.com>
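The squashed history above introduces quite a few cooperating classes. As a rough orientation only, the following is a minimal wiring sketch based on the constructor calls visible in the tests and configs below; the package-root imports, the base URL, the data folder, and the 7000-byte limit are illustrative assumptions (the tests import these classes from src/ directly), so treat it as a sketch rather than this commit's actual default configuration.

import {
  AtomicFileDataAccessor,
  ExtensionBasedMapper,
  FileSizeReporter,
  GlobalQuotaStrategy,
  QuotaValidator,
  UNIT_BYTES,
  ValidatingDataAccessor,
} from '@solid/community-server'; // Assumed export location; the tests import from src/

const baseUrl = 'http://localhost:3000/'; // Illustrative
const rootFilePath = '/var/data/css'; // Illustrative

// Maps resource URLs to file paths; shared by the accessor and the size reporter.
const mapper = new ExtensionBasedMapper(baseUrl, rootFilePath);

// Reports sizes in bytes and skips the server's internal folder, as in the configs below.
const reporter = new FileSizeReporter(mapper, rootFilePath, [ '^/\\.internal$' ]);

// One limit for the whole server; PodQuotaStrategy applies the same idea per pod.
// The third argument identifies the root whose total size is measured
// (an assumption based on the mock in GlobalQuotaStrategy.test.ts).
const strategy = new GlobalQuotaStrategy({ amount: 7000, unit: UNIT_BYTES }, reporter, baseUrl);

// The validator estimates the size up front (Content-Length), guards the stream while it is
// being written, and re-checks the available space afterwards, as exercised in QuotaValidator.test.ts.
const validator = new QuotaValidator(strategy);

// The validating accessor runs that validator before delegating to the atomic accessor,
// which writes to a temp folder first so partial writes do not corrupt the stored data.
const accessor = new ValidatingDataAccessor(
  new AtomicFileDataAccessor(mapper, rootFilePath, './.internal/tempFiles/'),
  validator,
);

In the integration configs further down, the limit is not hard-coded like this but injected through the urn:solid-server:default:variable:GlobalQuota and urn:solid-server:default:variable:PodQuota Components.js variables.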
This commit is contained in:
test/integration/Quota.test.ts (new file, 222 lines)
@@ -0,0 +1,222 @@
|
||||
import { promises as fsPromises } from 'fs';
|
||||
import type { Stats } from 'fs';
|
||||
import fetch from 'cross-fetch';
|
||||
import type { Response } from 'cross-fetch';
|
||||
import { joinFilePath, joinUrl } from '../../src';
|
||||
import type { App } from '../../src';
|
||||
import { getPort } from '../util/Util';
|
||||
import { getDefaultVariables, getTestConfigPath, getTestFolder, instantiateFromConfig, removeFolder } from './Config';
|
||||
|
||||
/** Performs a simple PUT request to the given 'path' with a body of 'length' characters */
|
||||
async function performSimplePutWithLength(path: string, length: number): Promise<Response> {
|
||||
return fetch(
|
||||
path,
|
||||
{
|
||||
method: 'PUT',
|
||||
headers: {
|
||||
'content-type': 'text/plain',
|
||||
},
|
||||
body: 'A'.repeat(length),
|
||||
},
|
||||
);
|
||||
}
|
||||
|
||||
/** Registers two test pods on the server matching the 'baseUrl' */
|
||||
async function registerTestPods(baseUrl: string, pods: string[]): Promise<void> {
|
||||
for (const pod of pods) {
|
||||
await fetch(`${baseUrl}idp/register/`, {
|
||||
method: 'POST',
|
||||
headers: {
|
||||
'content-type': 'application/json',
|
||||
},
|
||||
body: JSON.stringify({
|
||||
createWebId: 'on',
|
||||
webId: '',
|
||||
register: 'on',
|
||||
createPod: 'on',
|
||||
podName: pod,
|
||||
email: `${pod}@example.ai`,
|
||||
password: 't',
|
||||
confirmPassword: 't',
|
||||
submit: '',
|
||||
}),
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
/* We just want a container with the correct metadata, everything else can be removed */
|
||||
async function clearInitialFiles(rootFilePath: string, pods: string[]): Promise<void> {
|
||||
for (const pod of pods) {
|
||||
const fileList = await fsPromises.readdir(joinFilePath(rootFilePath, pod));
|
||||
for (const file of fileList) {
|
||||
if (file !== '.meta') {
|
||||
const path = joinFilePath(rootFilePath, pod, file);
|
||||
if ((await fsPromises.stat(path)).isDirectory()) {
|
||||
await fsPromises.rmdir(path, { recursive: true });
|
||||
} else {
|
||||
await fsPromises.unlink(path);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
describe('A quota server', (): void => {
|
||||
// The allowed quota depends on what filesystem/OS you are using.
|
||||
// For example: an empty folder is reported as
|
||||
// - 0KB on NTFS (most of the time, mileage may vary)
|
||||
// - 0-...KB on APFS (depending on its contents and settings)
|
||||
// - 4096KB on FAT
|
||||
// This is why we need to determine the size of a folder on the current system.
|
||||
let folderSizeTest: Stats;
|
||||
beforeAll(async(): Promise<void> => {
|
||||
// We want to use an empty folder as on APFS/Mac folder sizes vary a lot
|
||||
const tempFolder = getTestFolder('quota-temp');
|
||||
await fsPromises.mkdir(tempFolder);
|
||||
folderSizeTest = await fsPromises.stat(tempFolder);
|
||||
await removeFolder(tempFolder);
|
||||
});
|
||||
const podName1 = 'arthur';
|
||||
const podName2 = 'abel';
|
||||
|
||||
/** Test the general functionality of the server using pod quota */
|
||||
describe('with pod quota enabled', (): void => {
|
||||
const port = getPort('PodQuota');
|
||||
const baseUrl = `http://localhost:${port}/`;
|
||||
const pod1 = joinUrl(baseUrl, podName1);
|
||||
const pod2 = joinUrl(baseUrl, podName2);
|
||||
const rootFilePath = getTestFolder('quota-pod');
|
||||
|
||||
let app: App;
|
||||
|
||||
beforeAll(async(): Promise<void> => {
|
||||
// Calculate the allowed quota depending on file system used
|
||||
const size = folderSizeTest.size + 4000;
|
||||
|
||||
const instances = await instantiateFromConfig(
|
||||
'urn:solid-server:test:Instances',
|
||||
getTestConfigPath('quota-pod.json'),
|
||||
{
|
||||
...getDefaultVariables(port, baseUrl),
|
||||
'urn:solid-server:default:variable:rootFilePath': rootFilePath,
|
||||
'urn:solid-server:default:variable:PodQuota': size,
|
||||
},
|
||||
) as Record<string, any>;
|
||||
({ app } = instances);
|
||||
await app.start();
|
||||
|
||||
// Initialize 2 pods
|
||||
await registerTestPods(baseUrl, [ podName1, podName2 ]);
|
||||
await clearInitialFiles(rootFilePath, [ podName1, podName2 ]);
|
||||
});
|
||||
|
||||
afterAll(async(): Promise<void> => {
|
||||
await app.stop();
|
||||
await removeFolder(rootFilePath);
|
||||
});
|
||||
|
||||
// Test quota in the first pod
|
||||
it('should return a 413 when the quota is exceeded during write.', async(): Promise<void> => {
|
||||
const testFile1 = `${pod1}/test1.txt`;
|
||||
const testFile2 = `${pod1}/test2.txt`;
|
||||
|
||||
const response1 = performSimplePutWithLength(testFile1, 2000);
|
||||
await expect(response1).resolves.toBeDefined();
|
||||
expect((await response1).status).toEqual(201);
|
||||
|
||||
const response2 = performSimplePutWithLength(testFile2, 2500);
|
||||
await expect(response2).resolves.toBeDefined();
|
||||
expect((await response2).status).toEqual(413);
|
||||
});
|
||||
|
||||
// Test if writing in another pod is still possible
|
||||
it('should allow writing in a pod that is not full yet.', async(): Promise<void> => {
|
||||
const testFile1 = `${pod2}/test1.txt`;
|
||||
|
||||
const response1 = performSimplePutWithLength(testFile1, 2000);
|
||||
await expect(response1).resolves.toBeDefined();
|
||||
expect((await response1).status).toEqual(201);
|
||||
});
|
||||
|
||||
// Both pods should not accept this request anymore
|
||||
it('should block PUT requests to different pods if their quota is exceeded.', async(): Promise<void> => {
|
||||
const testFile1 = `${pod1}/test2.txt`;
|
||||
const testFile2 = `${pod2}/test2.txt`;
|
||||
|
||||
const response1 = performSimplePutWithLength(testFile1, 2500);
|
||||
await expect(response1).resolves.toBeDefined();
|
||||
expect((await response1).status).toEqual(413);
|
||||
|
||||
const response2 = performSimplePutWithLength(testFile2, 2500);
|
||||
await expect(response2).resolves.toBeDefined();
|
||||
expect((await response2).status).toEqual(413);
|
||||
});
|
||||
});
|
||||
|
||||
/** Test the general functionality of the server using global quota */
|
||||
describe('with global quota enabled', (): void => {
|
||||
const port = getPort('GlobalQuota');
|
||||
const baseUrl = `http://localhost:${port}/`;
|
||||
const pod1 = `${baseUrl}${podName1}`;
|
||||
const pod2 = `${baseUrl}${podName2}`;
|
||||
const rootFilePath = getTestFolder('quota-global');
|
||||
|
||||
let app: App;
|
||||
|
||||
beforeAll(async(): Promise<void> => {
|
||||
// Calculate the allowed quota depending on file system used
|
||||
const size = (folderSizeTest.size * 3) + 4000;
|
||||
|
||||
const instances = await instantiateFromConfig(
|
||||
'urn:solid-server:test:Instances',
|
||||
getTestConfigPath('quota-global.json'),
|
||||
{
|
||||
...getDefaultVariables(port, baseUrl),
|
||||
'urn:solid-server:default:variable:rootFilePath': rootFilePath,
|
||||
'urn:solid-server:default:variable:GlobalQuota': size,
|
||||
},
|
||||
) as Record<string, any>;
|
||||
({ app } = instances);
|
||||
await app.start();
|
||||
|
||||
// Initialize 2 pods
|
||||
await registerTestPods(baseUrl, [ podName1, podName2 ]);
|
||||
await clearInitialFiles(rootFilePath, [ podName1, podName2 ]);
|
||||
});
|
||||
|
||||
afterAll(async(): Promise<void> => {
|
||||
await app.stop();
|
||||
await removeFolder(rootFilePath);
|
||||
});
|
||||
|
||||
it('should return 413 when global quota is exceeded.', async(): Promise<void> => {
|
||||
const testFile1 = `${baseUrl}test1.txt`;
|
||||
const testFile2 = `${baseUrl}test2.txt`;
|
||||
|
||||
const response1 = performSimplePutWithLength(testFile1, 2000);
|
||||
await expect(response1).resolves.toBeDefined();
|
||||
const awaitedRes1 = await response1;
|
||||
expect(awaitedRes1.status).toEqual(201);
|
||||
|
||||
const response2 = performSimplePutWithLength(testFile2, 2500);
|
||||
await expect(response2).resolves.toBeDefined();
|
||||
const awaitedRes2 = await response2;
|
||||
expect(awaitedRes2.status).toEqual(413);
|
||||
});
|
||||
|
||||
it('should return 413 when trying to write to any pod when global quota is exceeded.', async(): Promise<void> => {
|
||||
const testFile1 = `${pod1}/test3.txt`;
|
||||
const testFile2 = `${pod2}/test4.txt`;
|
||||
|
||||
const response1 = performSimplePutWithLength(testFile1, 2500);
|
||||
await expect(response1).resolves.toBeDefined();
|
||||
const awaitedRes1 = await response1;
|
||||
expect(awaitedRes1.status).toEqual(413);
|
||||
|
||||
const response2 = performSimplePutWithLength(testFile2, 2500);
|
||||
await expect(response2).resolves.toBeDefined();
|
||||
const awaitedRes2 = await response2;
|
||||
expect(awaitedRes2.status).toEqual(413);
|
||||
});
|
||||
});
|
||||
});
test/integration/config/quota-global.json (new file, 65 lines)
@@ -0,0 +1,65 @@
|
||||
{
|
||||
"@context": "https://linkedsoftwaredependencies.org/bundles/npm/@solid/community-server/^2.0.0/components/context.jsonld",
|
||||
"import": [
|
||||
"files-scs:config/app/main/default.json",
|
||||
"files-scs:config/app/init/initialize-root.json",
|
||||
"files-scs:config/app/setup/disabled.json",
|
||||
"files-scs:config/http/handler/default.json",
|
||||
"files-scs:config/http/middleware/websockets.json",
|
||||
"files-scs:config/http/server-factory/websockets.json",
|
||||
"files-scs:config/http/static/default.json",
|
||||
"files-scs:config/identity/access/public.json",
|
||||
"files-scs:config/identity/email/default.json",
|
||||
"files-scs:config/identity/handler/default.json",
|
||||
"files-scs:config/identity/ownership/token.json",
|
||||
"files-scs:config/identity/pod/static.json",
|
||||
"files-scs:config/identity/registration/enabled.json",
|
||||
"files-scs:config/ldp/authentication/dpop-bearer.json",
|
||||
"files-scs:config/ldp/authorization/allow-all.json",
|
||||
"files-scs:config/ldp/handler/default.json",
|
||||
"files-scs:config/ldp/metadata-parser/default.json",
|
||||
"files-scs:config/ldp/metadata-writer/default.json",
|
||||
"files-scs:config/ldp/modes/default.json",
|
||||
"files-scs:config/storage/backend/global-quota-file.json",
|
||||
"files-scs:config/storage/key-value/resource-store.json",
|
||||
"files-scs:config/storage/middleware/default.json",
|
||||
"files-scs:config/util/auxiliary/acl.json",
|
||||
"files-scs:config/util/identifiers/suffix.json",
|
||||
"files-scs:config/util/index/default.json",
|
||||
"files-scs:config/util/logging/winston.json",
|
||||
"files-scs:config/util/representation-conversion/default.json",
|
||||
"files-scs:config/util/resource-locker/memory.json",
|
||||
"files-scs:config/util/variables/default.json"
|
||||
],
|
||||
"@graph": [
|
||||
{
|
||||
"comment": "A single-pod server that stores its resources on disk while enforcing quota."
|
||||
},
|
||||
{
|
||||
"comment": "The set quota enforced globally",
|
||||
"@id": "urn:solid-server:default:variable:GlobalQuota",
|
||||
"@type": "Variable"
|
||||
},
|
||||
{
|
||||
"@id": "urn:solid-server:default:QuotaStrategy",
|
||||
"GlobalQuotaStrategy:_limit_amount": {
|
||||
"@id": "urn:solid-server:default:variable:GlobalQuota"
|
||||
},
|
||||
"GlobalQuotaStrategy:_limit_unit": "bytes"
|
||||
},
|
||||
{
|
||||
"@id": "urn:solid-server:default:SizeReporter",
|
||||
"FileSizeReporter:_ignoreFolders": [ "^/\\.internal$" ]
|
||||
},
|
||||
{
|
||||
"@id": "urn:solid-server:test:Instances",
|
||||
"@type": "RecordObject",
|
||||
"record": [
|
||||
{
|
||||
"RecordObject:_record_key": "app",
|
||||
"RecordObject:_record_value": { "@id": "urn:solid-server:default:App" }
|
||||
}
|
||||
]
|
||||
}
|
||||
]
|
||||
}
test/integration/config/quota-pod.json (new file, 61 lines)
@@ -0,0 +1,61 @@
|
||||
{
|
||||
"@context": "https://linkedsoftwaredependencies.org/bundles/npm/@solid/community-server/^2.0.0/components/context.jsonld",
|
||||
"import": [
|
||||
"files-scs:config/app/main/default.json",
|
||||
"files-scs:config/app/init/initialize-root.json",
|
||||
"files-scs:config/app/setup/disabled.json",
|
||||
"files-scs:config/http/handler/default.json",
|
||||
"files-scs:config/http/middleware/websockets.json",
|
||||
"files-scs:config/http/server-factory/websockets.json",
|
||||
"files-scs:config/http/static/default.json",
|
||||
"files-scs:config/identity/access/public.json",
|
||||
"files-scs:config/identity/email/default.json",
|
||||
"files-scs:config/identity/handler/default.json",
|
||||
"files-scs:config/identity/ownership/token.json",
|
||||
"files-scs:config/identity/pod/static.json",
|
||||
"files-scs:config/identity/registration/enabled.json",
|
||||
"files-scs:config/ldp/authentication/dpop-bearer.json",
|
||||
"files-scs:config/ldp/authorization/allow-all.json",
|
||||
"files-scs:config/ldp/handler/default.json",
|
||||
"files-scs:config/ldp/metadata-parser/default.json",
|
||||
"files-scs:config/ldp/metadata-writer/default.json",
|
||||
"files-scs:config/ldp/modes/default.json",
|
||||
"files-scs:config/storage/backend/pod-quota-file.json",
|
||||
"files-scs:config/storage/key-value/resource-store.json",
|
||||
"files-scs:config/storage/middleware/default.json",
|
||||
"files-scs:config/util/auxiliary/acl.json",
|
||||
"files-scs:config/util/identifiers/suffix.json",
|
||||
"files-scs:config/util/index/default.json",
|
||||
"files-scs:config/util/logging/winston.json",
|
||||
"files-scs:config/util/representation-conversion/default.json",
|
||||
"files-scs:config/util/resource-locker/memory.json",
|
||||
"files-scs:config/util/variables/default.json"
|
||||
],
|
||||
"@graph": [
|
||||
{
|
||||
"comment": "A single-pod server that stores its resources on disk while enforcing quota."
|
||||
},
|
||||
{
|
||||
"comment": "The set quota enforced per pod",
|
||||
"@id": "urn:solid-server:default:variable:PodQuota",
|
||||
"@type": "Variable"
|
||||
},
|
||||
{
|
||||
"@id": "urn:solid-server:default:QuotaStrategy",
|
||||
"PodQuotaStrategy:_limit_amount": {
|
||||
"@id": "urn:solid-server:default:variable:PodQuota"
|
||||
},
|
||||
"PodQuotaStrategy:_limit_unit": "bytes"
|
||||
},
|
||||
{
|
||||
"@id": "urn:solid-server:test:Instances",
|
||||
"@type": "RecordObject",
|
||||
"record": [
|
||||
{
|
||||
"RecordObject:_record_key": "app",
|
||||
"RecordObject:_record_value": { "@id": "urn:solid-server:default:App" }
|
||||
}
|
||||
]
|
||||
}
|
||||
]
|
||||
}
|
||||
@@ -61,10 +61,10 @@ describe('A ComposedAuxiliaryStrategy', (): void => {
|
||||
});
|
||||
|
||||
it('validates data through the Validator.', async(): Promise<void> => {
|
||||
const representation = { data: 'data!' } as any;
|
||||
const representation = { data: 'data!', metadata: { identifier: { value: 'any' }}} as any;
|
||||
await expect(strategy.validate(representation)).resolves.toBeUndefined();
|
||||
expect(validator.handleSafe).toHaveBeenCalledTimes(1);
|
||||
expect(validator.handleSafe).toHaveBeenLastCalledWith(representation);
|
||||
expect(validator.handleSafe).toHaveBeenLastCalledWith({ representation, identifier: { path: 'any' }});
|
||||
});
|
||||
|
||||
it('defaults isRequiredInRoot to false.', async(): Promise<void> => {
|
||||
|
||||
@@ -1,5 +1,6 @@
|
||||
import { RdfValidator } from '../../../../src/http/auxiliary/RdfValidator';
|
||||
import { BasicRepresentation } from '../../../../src/http/representation/BasicRepresentation';
|
||||
import type { ResourceIdentifier } from '../../../../src/http/representation/ResourceIdentifier';
|
||||
import type { RepresentationConverter } from '../../../../src/storage/conversion/RepresentationConverter';
|
||||
import { readableToString } from '../../../../src/util/StreamUtil';
|
||||
import { StaticAsyncHandler } from '../../../util/StaticAsyncHandler';
|
||||
@@ -8,6 +9,7 @@ import 'jest-rdf';
|
||||
describe('An RdfValidator', (): void => {
|
||||
let converter: RepresentationConverter;
|
||||
let validator: RdfValidator;
|
||||
const identifier: ResourceIdentifier = { path: 'any/path' };
|
||||
|
||||
beforeEach(async(): Promise<void> => {
|
||||
converter = new StaticAsyncHandler<any>(true, null);
|
||||
@@ -20,14 +22,15 @@ describe('An RdfValidator', (): void => {
|
||||
|
||||
it('always accepts content-type internal/quads.', async(): Promise<void> => {
|
||||
const representation = new BasicRepresentation('data', 'internal/quads');
|
||||
await expect(validator.handle(representation)).resolves.toBeUndefined();
|
||||
await expect(validator.handle({ representation, identifier })).resolves.toEqual(representation);
|
||||
});
|
||||
|
||||
it('validates data by running it through a converter.', async(): Promise<void> => {
|
||||
converter.handleSafe = jest.fn().mockResolvedValue(new BasicRepresentation('transformedData', 'wrongType'));
|
||||
const representation = new BasicRepresentation('data', 'content-type');
|
||||
const quads = representation.metadata.quads();
|
||||
await expect(validator.handle(representation)).resolves.toBeUndefined();
|
||||
// Output is not important for this Validator
|
||||
await expect(validator.handle({ representation, identifier })).resolves.toBeDefined();
|
||||
// Make sure the data can still be streamed
|
||||
await expect(readableToString(representation.data)).resolves.toBe('data');
|
||||
// Make sure the metadata was not changed
|
||||
@@ -37,7 +40,7 @@ describe('An RdfValidator', (): void => {
|
||||
it('throws an error when validating invalid data.', async(): Promise<void> => {
|
||||
converter.handleSafe = jest.fn().mockRejectedValue(new Error('bad data!'));
|
||||
const representation = new BasicRepresentation('data', 'content-type');
|
||||
await expect(validator.handle(representation)).rejects.toThrow('bad data!');
|
||||
await expect(validator.handle({ representation, identifier })).rejects.toThrow('bad data!');
|
||||
// Make sure the data on the readable has not been reset
|
||||
expect(representation.data.destroyed).toBe(true);
|
||||
});
test/unit/http/input/metadata/ContentLengthParser.test.ts (new file, 32 lines)
@@ -0,0 +1,32 @@
|
||||
import { ContentLengthParser } from '../../../../../src/http/input/metadata/ContentLengthParser';
|
||||
import { RepresentationMetadata } from '../../../../../src/http/representation/RepresentationMetadata';
|
||||
import type { HttpRequest } from '../../../../../src/server/HttpRequest';
|
||||
|
||||
describe('A ContentLengthParser', (): void => {
|
||||
const parser = new ContentLengthParser();
|
||||
let request: HttpRequest;
|
||||
let metadata: RepresentationMetadata;
|
||||
|
||||
beforeEach(async(): Promise<void> => {
|
||||
request = { headers: {}} as HttpRequest;
|
||||
metadata = new RepresentationMetadata();
|
||||
});
|
||||
|
||||
it('does nothing if there is no content-length header.', async(): Promise<void> => {
|
||||
await expect(parser.handle({ request, metadata })).resolves.toBeUndefined();
|
||||
expect(metadata.quads()).toHaveLength(0);
|
||||
});
|
||||
|
||||
it('sets the given content-length as metadata.', async(): Promise<void> => {
|
||||
request.headers['content-length'] = '50';
|
||||
await expect(parser.handle({ request, metadata })).resolves.toBeUndefined();
|
||||
expect(metadata.quads()).toHaveLength(1);
|
||||
expect(metadata.contentLength).toBe(50);
|
||||
});
|
||||
|
||||
it('does not set a content-length when the header is invalid.', async(): Promise<void> => {
|
||||
request.headers['content-length'] = 'aabbcc50ccbbaa';
|
||||
await expect(parser.handle({ request, metadata })).resolves.toBeUndefined();
|
||||
expect(metadata.quads()).toHaveLength(0);
|
||||
});
|
||||
});
|
||||
@@ -60,6 +60,16 @@ describe('A RepresentationMetadata', (): void => {
|
||||
expect(metadata.contentType).toEqual('text/turtle');
|
||||
});
|
||||
|
||||
it('stores the content-length correctly.', async(): Promise<void> => {
|
||||
metadata = new RepresentationMetadata();
|
||||
metadata.contentLength = 50;
|
||||
expect(metadata.contentLength).toEqual(50);
|
||||
|
||||
metadata = new RepresentationMetadata();
|
||||
metadata.contentLength = undefined;
|
||||
expect(metadata.contentLength).toBeUndefined();
|
||||
});
|
||||
|
||||
it('copies an other metadata object.', async(): Promise<void> => {
|
||||
const other = new RepresentationMetadata({ path: 'otherId' }, { 'test:pred': 'objVal' });
|
||||
metadata = new RepresentationMetadata(other);
test/unit/quota/GlobalQuotaStrategy.test.ts (new file, 37 lines)
@@ -0,0 +1,37 @@
|
||||
import type { ResourceIdentifier } from '../../../src/http/representation/ResourceIdentifier';
|
||||
import { GlobalQuotaStrategy } from '../../../src/storage/quota/GlobalQuotaStrategy';
|
||||
import { UNIT_BYTES } from '../../../src/storage/size-reporter/Size';
|
||||
import type { Size } from '../../../src/storage/size-reporter/Size';
|
||||
import type { SizeReporter } from '../../../src/storage/size-reporter/SizeReporter';
|
||||
|
||||
describe('GlobalQuotaStrategy', (): void => {
|
||||
let strategy: GlobalQuotaStrategy;
|
||||
let mockSize: Size;
|
||||
let mockReporter: jest.Mocked<SizeReporter<any>>;
|
||||
let mockBase: string;
|
||||
|
||||
beforeEach((): void => {
|
||||
mockSize = { amount: 2000, unit: UNIT_BYTES };
|
||||
mockBase = '';
|
||||
mockReporter = {
|
||||
getSize: jest.fn(async(identifier: ResourceIdentifier): Promise<Size> => ({
|
||||
unit: mockSize.unit,
|
||||
// This mock will return 1000 as the size of the root and 50 for any other resource
|
||||
amount: identifier.path === mockBase ? 1000 : 50,
|
||||
})),
|
||||
getUnit: jest.fn().mockReturnValue(mockSize.unit),
|
||||
calculateChunkSize: jest.fn(async(chunk: any): Promise<number> => chunk.length),
|
||||
estimateSize: jest.fn().mockResolvedValue(5),
|
||||
};
|
||||
strategy = new GlobalQuotaStrategy(mockSize, mockReporter, mockBase);
|
||||
});
|
||||
|
||||
describe('getAvailableSpace()', (): void => {
|
||||
it('should return the correct amount of available space left.', async(): Promise<void> => {
|
||||
const result = strategy.getAvailableSpace({ path: 'any/path' });
|
||||
await expect(result).resolves.toEqual(
|
||||
expect.objectContaining({ amount: mockSize.amount - 950 }),
|
||||
);
|
||||
});
|
||||
});
|
||||
});
test/unit/quota/PodQuotaStrategy.test.ts (new file, 77 lines)
@@ -0,0 +1,77 @@
|
||||
import { RepresentationMetadata } from '../../../src/http/representation/RepresentationMetadata';
|
||||
import type { ResourceIdentifier } from '../../../src/http/representation/ResourceIdentifier';
|
||||
import type { DataAccessor } from '../../../src/storage/accessors/DataAccessor';
|
||||
import { PodQuotaStrategy } from '../../../src/storage/quota/PodQuotaStrategy';
|
||||
import { UNIT_BYTES } from '../../../src/storage/size-reporter/Size';
|
||||
import type { Size } from '../../../src/storage/size-reporter/Size';
|
||||
import type { SizeReporter } from '../../../src/storage/size-reporter/SizeReporter';
|
||||
import { NotFoundHttpError } from '../../../src/util/errors/NotFoundHttpError';
|
||||
import type { IdentifierStrategy } from '../../../src/util/identifiers/IdentifierStrategy';
|
||||
import { SingleRootIdentifierStrategy } from '../../../src/util/identifiers/SingleRootIdentifierStrategy';
|
||||
import { PIM, RDF } from '../../../src/util/Vocabularies';
|
||||
import { mockFs } from '../../util/Util';
|
||||
|
||||
jest.mock('fs');
|
||||
|
||||
describe('PodQuotaStrategy', (): void => {
|
||||
let strategy: PodQuotaStrategy;
|
||||
let mockSize: Size;
|
||||
let mockReporter: jest.Mocked<SizeReporter<any>>;
|
||||
let identifierStrategy: IdentifierStrategy;
|
||||
let accessor: jest.Mocked<DataAccessor>;
|
||||
const base = 'http://localhost:3000/';
|
||||
const rootFilePath = 'folder';
|
||||
|
||||
beforeEach((): void => {
|
||||
jest.restoreAllMocks();
|
||||
mockFs(rootFilePath, new Date());
|
||||
mockSize = { amount: 2000, unit: UNIT_BYTES };
|
||||
identifierStrategy = new SingleRootIdentifierStrategy(base);
|
||||
mockReporter = {
|
||||
getSize: jest.fn().mockResolvedValue({ unit: mockSize.unit, amount: 50 }),
|
||||
getUnit: jest.fn().mockReturnValue(mockSize.unit),
|
||||
calculateChunkSize: jest.fn(async(chunk: any): Promise<number> => chunk.length),
|
||||
estimateSize: jest.fn().mockResolvedValue(5),
|
||||
};
|
||||
accessor = {
|
||||
// Assume that the pod is called "nested"
|
||||
getMetadata: jest.fn().mockImplementation(
|
||||
async(identifier: ResourceIdentifier): Promise<RepresentationMetadata> => {
|
||||
const res = new RepresentationMetadata();
|
||||
if (identifier.path === `${base}nested/`) {
|
||||
res.add(RDF.type, PIM.Storage);
|
||||
}
|
||||
return res;
|
||||
},
|
||||
),
|
||||
} as any;
|
||||
strategy = new PodQuotaStrategy(mockSize, mockReporter, identifierStrategy, accessor);
|
||||
});
|
||||
|
||||
describe('getAvailableSpace()', (): void => {
|
||||
it('should return a Size containing MAX_SAFE_INTEGER when writing outside a pod.', async(): Promise<void> => {
|
||||
const result = strategy.getAvailableSpace({ path: `${base}file.txt` });
|
||||
await expect(result).resolves.toEqual(expect.objectContaining({ amount: Number.MAX_SAFE_INTEGER }));
|
||||
});
|
||||
it('should ignore the size of the existing resource when writing inside a pod.', async(): Promise<void> => {
|
||||
const result = strategy.getAvailableSpace({ path: `${base}nested/nested2/file.txt` });
|
||||
await expect(result).resolves.toEqual(expect.objectContaining({ amount: mockSize.amount }));
|
||||
expect(mockReporter.getSize).toHaveBeenCalledTimes(2);
|
||||
});
|
||||
it('should return a Size containing the available space when writing inside a pod.', async(): Promise<void> => {
|
||||
accessor.getMetadata.mockImplementationOnce((): any => {
|
||||
throw new NotFoundHttpError();
|
||||
});
|
||||
const result = strategy.getAvailableSpace({ path: `${base}nested/nested2/file.txt` });
|
||||
await expect(result).resolves.toEqual(expect.objectContaining({ amount: mockSize.amount }));
|
||||
expect(mockReporter.getSize).toHaveBeenCalledTimes(2);
|
||||
});
|
||||
it('should throw when looking for pim:Storage errors.', async(): Promise<void> => {
|
||||
accessor.getMetadata.mockImplementationOnce((): any => {
|
||||
throw new Error('error');
|
||||
});
|
||||
const result = strategy.getAvailableSpace({ path: `${base}nested/nested2/file.txt` });
|
||||
await expect(result).rejects.toThrow('error');
|
||||
});
|
||||
});
|
||||
});
test/unit/quota/QuotaStrategy.test.ts (new file, 88 lines)
@@ -0,0 +1,88 @@
|
||||
import { RepresentationMetadata } from '../../../src/http/representation/RepresentationMetadata';
|
||||
import { QuotaStrategy } from '../../../src/storage/quota/QuotaStrategy';
|
||||
import { UNIT_BYTES } from '../../../src/storage/size-reporter/Size';
|
||||
import type { Size } from '../../../src/storage/size-reporter/Size';
|
||||
import type { SizeReporter } from '../../../src/storage/size-reporter/SizeReporter';
|
||||
import { guardedStreamFrom, pipeSafely } from '../../../src/util/StreamUtil';
|
||||
import { mockFs } from '../../util/Util';
|
||||
|
||||
jest.mock('fs');
|
||||
|
||||
class QuotaStrategyWrapper extends QuotaStrategy {
|
||||
public constructor(reporter: SizeReporter<any>, limit: Size) {
|
||||
super(reporter, limit);
|
||||
}
|
||||
|
||||
public getAvailableSpace = async(): Promise<Size> => ({ unit: UNIT_BYTES, amount: 5 });
|
||||
protected getTotalSpaceUsed = async(): Promise<Size> => ({ unit: UNIT_BYTES, amount: 5 });
|
||||
}
|
||||
|
||||
describe('A QuotaStrategy', (): void => {
|
||||
let strategy: QuotaStrategyWrapper;
|
||||
let mockSize: Size;
|
||||
let mockReporter: jest.Mocked<SizeReporter<any>>;
|
||||
const base = 'http://localhost:3000/';
|
||||
const rootFilePath = 'folder';
|
||||
|
||||
beforeEach((): void => {
|
||||
jest.restoreAllMocks();
|
||||
mockFs(rootFilePath, new Date());
|
||||
mockSize = { amount: 2000, unit: UNIT_BYTES };
|
||||
mockReporter = {
|
||||
getSize: jest.fn().mockResolvedValue({ unit: mockSize.unit, amount: 50 }),
|
||||
getUnit: jest.fn().mockReturnValue(mockSize.unit),
|
||||
calculateChunkSize: jest.fn(async(chunk: any): Promise<number> => chunk.length),
|
||||
estimateSize: jest.fn().mockResolvedValue(5),
|
||||
};
|
||||
strategy = new QuotaStrategyWrapper(mockReporter, mockSize);
|
||||
});
|
||||
|
||||
describe('constructor()', (): void => {
|
||||
it('should set the passed parameters as properties.', async(): Promise<void> => {
|
||||
expect(strategy.limit).toEqual(mockSize);
|
||||
expect(strategy.reporter).toEqual(mockReporter);
|
||||
});
|
||||
});
|
||||
|
||||
describe('estimateSize()', (): void => {
|
||||
it('should return a Size object containing the correct unit and amount.', async(): Promise<void> => {
|
||||
await expect(strategy.estimateSize(new RepresentationMetadata())).resolves.toEqual(
|
||||
// This '5' comes from the reporter mock a little higher up in this file
|
||||
expect.objectContaining({ unit: mockSize.unit, amount: 5 }),
|
||||
);
|
||||
});
|
||||
it('should return undefined when the reporter returns undefined.', async(): Promise<void> => {
|
||||
mockReporter.estimateSize.mockResolvedValueOnce(undefined);
|
||||
await expect(strategy.estimateSize(new RepresentationMetadata())).resolves.toBeUndefined();
|
||||
});
|
||||
});
|
||||
|
||||
describe('createQuotaGuard()', (): void => {
|
||||
it('should return a passthrough that destroys the stream when quota is exceeded.', async(): Promise<void> => {
|
||||
strategy.getAvailableSpace = jest.fn().mockReturnValue({ amount: 50, unit: mockSize.unit });
|
||||
const fiftyChars = 'A'.repeat(50);
|
||||
const stream = guardedStreamFrom(fiftyChars);
|
||||
const track = await strategy.createQuotaGuard({ path: `${base}nested/file2.txt` });
|
||||
const piped = pipeSafely(stream, track);
|
||||
|
||||
for (let i = 0; i < 10; i++) {
|
||||
stream.push(fiftyChars);
|
||||
}
|
||||
|
||||
expect(piped.destroyed).toBe(false);
|
||||
|
||||
for (let i = 0; i < 10; i++) {
|
||||
stream.push(fiftyChars);
|
||||
}
|
||||
|
||||
expect(piped.destroyed).toBe(false);
|
||||
|
||||
stream.push(fiftyChars);
|
||||
|
||||
const destroy = new Promise<void>((resolve): void => {
|
||||
piped.on('error', (): void => resolve());
|
||||
});
|
||||
await expect(destroy).resolves.toBeUndefined();
|
||||
});
|
||||
});
|
||||
});
test/unit/storage/accessors/AtomicFileDataAccessor.test.ts (new file, 97 lines)
@@ -0,0 +1,97 @@
|
||||
import 'jest-rdf';
|
||||
import type { Readable } from 'stream';
|
||||
import { RepresentationMetadata } from '../../../../src/http/representation/RepresentationMetadata';
|
||||
import { AtomicFileDataAccessor } from '../../../../src/storage/accessors/AtomicFileDataAccessor';
|
||||
import { ExtensionBasedMapper } from '../../../../src/storage/mapping/ExtensionBasedMapper';
|
||||
import { APPLICATION_OCTET_STREAM } from '../../../../src/util/ContentTypes';
|
||||
import type { Guarded } from '../../../../src/util/GuardedStream';
|
||||
import { guardedStreamFrom } from '../../../../src/util/StreamUtil';
|
||||
import { CONTENT_TYPE } from '../../../../src/util/Vocabularies';
|
||||
import { mockFs } from '../../../util/Util';
|
||||
|
||||
jest.mock('fs');
|
||||
|
||||
describe('AtomicFileDataAccessor', (): void => {
|
||||
const rootFilePath = 'uploads';
|
||||
const base = 'http://test.com/';
|
||||
let accessor: AtomicFileDataAccessor;
|
||||
let cache: { data: any };
|
||||
let metadata: RepresentationMetadata;
|
||||
let data: Guarded<Readable>;
|
||||
|
||||
beforeEach(async(): Promise<void> => {
|
||||
cache = mockFs(rootFilePath, new Date());
|
||||
accessor = new AtomicFileDataAccessor(
|
||||
new ExtensionBasedMapper(base, rootFilePath),
|
||||
rootFilePath,
|
||||
'./.internal/tempFiles/',
|
||||
);
|
||||
// The 'mkdirSync' call in AtomicFileDataAccessor's constructor does not seem to create the folder
// in the cache object used for mocking fs, so the next line creates that folder entry manually.
|
||||
cache.data['.internal'] = { tempFiles: {}};
|
||||
metadata = new RepresentationMetadata(APPLICATION_OCTET_STREAM);
|
||||
data = guardedStreamFrom([ 'data' ]);
|
||||
});
|
||||
|
||||
describe('writing a document', (): void => {
|
||||
it('writes the data to the corresponding file.', async(): Promise<void> => {
|
||||
await expect(accessor.writeDocument({ path: `${base}resource` }, data, metadata)).resolves.toBeUndefined();
|
||||
expect(cache.data.resource).toBe('data');
|
||||
});
|
||||
|
||||
it('writes metadata to the corresponding metadata file.', async(): Promise<void> => {
|
||||
metadata = new RepresentationMetadata({ path: `${base}res.ttl` },
|
||||
{ [CONTENT_TYPE]: 'text/turtle', likes: 'apples' });
|
||||
await expect(accessor.writeDocument({ path: `${base}res.ttl` }, data, metadata)).resolves.toBeUndefined();
|
||||
expect(cache.data['res.ttl']).toBe('data');
|
||||
expect(cache.data['res.ttl.meta']).toMatch(`<${base}res.ttl> <likes> "apples".`);
|
||||
});
|
||||
|
||||
it('should delete temp file when done writing.', async(): Promise<void> => {
|
||||
await expect(accessor.writeDocument({ path: `${base}resource` }, data, metadata)).resolves.toBeUndefined();
|
||||
expect(Object.keys(cache.data['.internal'].tempFiles)).toHaveLength(0);
|
||||
expect(cache.data.resource).toBe('data');
|
||||
});
|
||||
|
||||
it('should throw an error when writing the data goes wrong.', async(): Promise<void> => {
|
||||
data.read = jest.fn((): any => {
|
||||
data.emit('error', new Error('error'));
|
||||
return null;
|
||||
});
|
||||
jest.requireMock('fs').promises.stat = jest.fn((): any => ({
|
||||
isFile: (): boolean => false,
|
||||
}));
|
||||
await expect(accessor.writeDocument({ path: `${base}res.ttl` }, data, metadata)).rejects.toThrow('error');
|
||||
});
|
||||
|
||||
it('should throw when renaming / moving the file goes wrong.', async(): Promise<void> => {
|
||||
jest.requireMock('fs').promises.rename = jest.fn((): any => {
|
||||
throw new Error('error');
|
||||
});
|
||||
jest.requireMock('fs').promises.stat = jest.fn((): any => ({
|
||||
isFile: (): boolean => true,
|
||||
}));
|
||||
await expect(accessor.writeDocument({ path: `${base}res.ttl` }, data, metadata)).rejects.toThrow('error');
|
||||
});
|
||||
|
||||
it('should (on error) not unlink the temp file if it does not exist.', async(): Promise<void> => {
|
||||
jest.requireMock('fs').promises.rename = jest.fn((): any => {
|
||||
throw new Error('error');
|
||||
});
|
||||
jest.requireMock('fs').promises.stat = jest.fn((): any => ({
|
||||
isFile: (): boolean => false,
|
||||
}));
|
||||
await expect(accessor.writeDocument({ path: `${base}res.ttl` }, data, metadata)).rejects.toThrow('error');
|
||||
});
|
||||
|
||||
it('should throw when renaming / moving the file goes wrong and the temp file does not exist.',
|
||||
async(): Promise<void> => {
|
||||
jest.requireMock('fs').promises.rename = jest.fn((): any => {
|
||||
throw new Error('error');
|
||||
});
|
||||
jest.requireMock('fs').promises.stat = jest.fn();
|
||||
await expect(accessor.writeDocument({ path: `${base}res.ttl` }, data, metadata)).rejects.toThrow('error');
|
||||
});
|
||||
});
|
||||
});
test/unit/storage/accessors/PassthroughDataAccessor.test.ts (new file, 80 lines)
@@ -0,0 +1,80 @@
|
||||
import { BasicRepresentation } from '../../../../src/http/representation/BasicRepresentation';
|
||||
import { RepresentationMetadata } from '../../../../src/http/representation/RepresentationMetadata';
|
||||
import type { DataAccessor } from '../../../../src/storage/accessors/DataAccessor';
|
||||
import { PassthroughDataAccessor } from '../../../../src/storage/accessors/PassthroughDataAccessor';
|
||||
import { guardedStreamFrom } from '../../../../src/util/StreamUtil';
|
||||
|
||||
describe('PassthroughDataAccessor', (): void => {
|
||||
let passthrough: PassthroughDataAccessor;
|
||||
let childAccessor: jest.Mocked<DataAccessor>;
|
||||
|
||||
const mockIdentifier = { path: 'http://localhost/test.txt' };
|
||||
const mockMetadata = new RepresentationMetadata();
|
||||
const mockData = guardedStreamFrom('test string');
|
||||
const mockRepresentation = new BasicRepresentation(mockData, mockMetadata);
|
||||
|
||||
beforeEach(async(): Promise<void> => {
|
||||
jest.clearAllMocks();
|
||||
childAccessor = {
|
||||
canHandle: jest.fn(),
|
||||
writeDocument: jest.fn(),
|
||||
getData: jest.fn(),
|
||||
getChildren: jest.fn(),
|
||||
writeContainer: jest.fn(),
|
||||
deleteResource: jest.fn(),
|
||||
getMetadata: jest.fn(),
|
||||
};
|
||||
childAccessor.getChildren = jest.fn();
|
||||
passthrough = new PassthroughDataAccessor(childAccessor);
|
||||
});
|
||||
|
||||
describe('writeDocument()', (): void => {
|
||||
it('should call the accessors writeDocument() function.', async(): Promise<void> => {
|
||||
await passthrough.writeDocument(mockIdentifier, mockData, mockMetadata);
|
||||
expect(childAccessor.writeDocument).toHaveBeenCalledTimes(1);
|
||||
expect(childAccessor.writeDocument).toHaveBeenCalledWith(mockIdentifier, mockData, mockMetadata);
|
||||
});
|
||||
});
|
||||
describe('canHandle()', (): void => {
|
||||
it('should call the accessors canHandle() function.', async(): Promise<void> => {
|
||||
await passthrough.canHandle(mockRepresentation);
|
||||
expect(childAccessor.canHandle).toHaveBeenCalledTimes(1);
|
||||
expect(childAccessor.canHandle).toHaveBeenCalledWith(mockRepresentation);
|
||||
});
|
||||
});
|
||||
describe('getData()', (): void => {
|
||||
it('should call the accessors getData() function.', async(): Promise<void> => {
|
||||
await passthrough.getData(mockIdentifier);
|
||||
expect(childAccessor.getData).toHaveBeenCalledTimes(1);
|
||||
expect(childAccessor.getData).toHaveBeenCalledWith(mockIdentifier);
|
||||
});
|
||||
});
|
||||
describe('getMetadata()', (): void => {
|
||||
it('should call the accessors getMetadata() function.', async(): Promise<void> => {
|
||||
await passthrough.getMetadata(mockIdentifier);
|
||||
expect(childAccessor.getMetadata).toHaveBeenCalledTimes(1);
|
||||
expect(childAccessor.getMetadata).toHaveBeenCalledWith(mockIdentifier);
|
||||
});
|
||||
});
|
||||
describe('getChildren()', (): void => {
|
||||
it('should call the accessors getChildren() function.', async(): Promise<void> => {
|
||||
passthrough.getChildren(mockIdentifier);
|
||||
expect(childAccessor.getChildren).toHaveBeenCalledTimes(1);
|
||||
expect(childAccessor.getChildren).toHaveBeenCalledWith(mockIdentifier);
|
||||
});
|
||||
});
|
||||
describe('deleteResource()', (): void => {
|
||||
it('should call the accessors deleteResource() function.', async(): Promise<void> => {
|
||||
await passthrough.deleteResource(mockIdentifier);
|
||||
expect(childAccessor.deleteResource).toHaveBeenCalledTimes(1);
|
||||
expect(childAccessor.deleteResource).toHaveBeenCalledWith(mockIdentifier);
|
||||
});
|
||||
});
|
||||
describe('writeContainer()', (): void => {
|
||||
it('should call the accessors writeContainer() function.', async(): Promise<void> => {
|
||||
await passthrough.writeContainer(mockIdentifier, mockMetadata);
|
||||
expect(childAccessor.writeContainer).toHaveBeenCalledTimes(1);
|
||||
expect(childAccessor.writeContainer).toHaveBeenCalledWith(mockIdentifier, mockMetadata);
|
||||
});
|
||||
});
|
||||
});
test/unit/storage/accessors/ValidatingDataAccessor.test.ts (new file, 54 lines)
@@ -0,0 +1,54 @@
|
||||
import type { Validator, ValidatorInput } from '../../../../src/http/auxiliary/Validator';
|
||||
import { BasicRepresentation } from '../../../../src/http/representation/BasicRepresentation';
|
||||
import type { Representation } from '../../../../src/http/representation/Representation';
|
||||
import { RepresentationMetadata } from '../../../../src/http/representation/RepresentationMetadata';
|
||||
import type { DataAccessor } from '../../../../src/storage/accessors/DataAccessor';
|
||||
import { ValidatingDataAccessor } from '../../../../src/storage/accessors/ValidatingDataAccessor';
|
||||
import { guardedStreamFrom } from '../../../../src/util/StreamUtil';
|
||||
|
||||
describe('ValidatingDataAccessor', (): void => {
|
||||
let validatingAccessor: ValidatingDataAccessor;
|
||||
let childAccessor: jest.Mocked<DataAccessor>;
|
||||
let validator: jest.Mocked<Validator>;
|
||||
|
||||
const mockIdentifier = { path: 'http://localhost/test.txt' };
|
||||
const mockMetadata = new RepresentationMetadata();
|
||||
const mockData = guardedStreamFrom('test string');
|
||||
const mockRepresentation = new BasicRepresentation(mockData, mockMetadata);
|
||||
|
||||
beforeEach(async(): Promise<void> => {
|
||||
jest.clearAllMocks();
|
||||
childAccessor = {
|
||||
writeDocument: jest.fn(),
|
||||
writeContainer: jest.fn(),
|
||||
} as any;
|
||||
childAccessor.getChildren = jest.fn();
|
||||
validator = {
|
||||
handleSafe: jest.fn(async(input: ValidatorInput): Promise<Representation> => input.representation),
|
||||
} as any;
|
||||
validatingAccessor = new ValidatingDataAccessor(childAccessor, validator);
|
||||
});
|
||||
|
||||
describe('writeDocument()', (): void => {
|
||||
it('should call the validator\'s handleSafe() function.', async(): Promise<void> => {
|
||||
await validatingAccessor.writeDocument(mockIdentifier, mockData, mockMetadata);
|
||||
expect(validator.handleSafe).toHaveBeenCalledTimes(1);
|
||||
expect(validator.handleSafe).toHaveBeenCalledWith({
|
||||
representation: mockRepresentation,
|
||||
identifier: mockIdentifier,
|
||||
});
|
||||
});
|
||||
it('should call the accessors writeDocument() function.', async(): Promise<void> => {
|
||||
await validatingAccessor.writeDocument(mockIdentifier, mockData, mockMetadata);
|
||||
expect(childAccessor.writeDocument).toHaveBeenCalledTimes(1);
|
||||
expect(childAccessor.writeDocument).toHaveBeenCalledWith(mockIdentifier, mockData, mockMetadata);
|
||||
});
|
||||
});
|
||||
describe('writeContainer()', (): void => {
|
||||
it('should call the accessors writeContainer() function.', async(): Promise<void> => {
|
||||
await validatingAccessor.writeContainer(mockIdentifier, mockMetadata);
|
||||
expect(childAccessor.writeContainer).toHaveBeenCalledTimes(1);
|
||||
expect(childAccessor.writeContainer).toHaveBeenCalledWith(mockIdentifier, mockMetadata);
|
||||
});
|
||||
});
|
||||
});
test/unit/storage/size-reporter/FileSizeReporter.test.ts (new file, 132 lines)
@@ -0,0 +1,132 @@
|
||||
import { promises as fsPromises } from 'fs';
|
||||
import { RepresentationMetadata } from '../../../../src/http/representation/RepresentationMetadata';
|
||||
import type { ResourceIdentifier } from '../../../../src/http/representation/ResourceIdentifier';
|
||||
import type { FileIdentifierMapper, ResourceLink } from '../../../../src/storage/mapping/FileIdentifierMapper';
|
||||
import { FileSizeReporter } from '../../../../src/storage/size-reporter/FileSizeReporter';
|
||||
import { UNIT_BYTES } from '../../../../src/storage/size-reporter/Size';
|
||||
import { joinFilePath } from '../../../../src/util/PathUtil';
|
||||
import { mockFs } from '../../../util/Util';
|
||||
|
||||
jest.mock('fs');
|
||||
|
||||
describe('A FileSizeReporter', (): void => {
|
||||
// Folder size is fixed to 4 in the mock
|
||||
const folderSize = 4;
|
||||
const mapper: jest.Mocked<FileIdentifierMapper> = {
|
||||
mapFilePathToUrl: jest.fn(),
|
||||
mapUrlToFilePath: jest.fn().mockImplementation((id: ResourceIdentifier): ResourceLink => ({
|
||||
filePath: id.path,
|
||||
identifier: id,
|
||||
isMetadata: false,
|
||||
})),
|
||||
};
|
||||
const fileRoot = joinFilePath(process.cwd(), '/test-folder/');
|
||||
const fileSizeReporter = new FileSizeReporter(
|
||||
mapper,
|
||||
fileRoot,
|
||||
[ '^/\\.internal$' ],
|
||||
);
|
||||
|
||||
beforeEach(async(): Promise<void> => {
|
||||
mockFs(fileRoot);
|
||||
});
|
||||
|
||||
it('should work without the ignoreFolders constructor parameter.', async(): Promise<void> => {
|
||||
const tempFileSizeReporter = new FileSizeReporter(
|
||||
mapper,
|
||||
fileRoot,
|
||||
);
|
||||
|
||||
const testFile = joinFilePath(fileRoot, '/test.txt');
|
||||
await fsPromises.writeFile(testFile, 'A'.repeat(20));
|
||||
|
||||
const result = tempFileSizeReporter.getSize({ path: testFile });
|
||||
await expect(result).resolves.toBeDefined();
|
||||
expect((await result).amount).toBe(20);
|
||||
});
|
||||
|
||||
it('should report the right file size.', async(): Promise<void> => {
|
||||
const testFile = joinFilePath(fileRoot, '/test.txt');
|
||||
await fsPromises.writeFile(testFile, 'A'.repeat(20));
|
||||
|
||||
const result = fileSizeReporter.getSize({ path: testFile });
|
||||
await expect(result).resolves.toBeDefined();
|
||||
expect((await result).amount).toBe(20);
|
||||
});
|
||||
|
||||
it('should work recursively.', async(): Promise<void> => {
|
||||
const containerFile = joinFilePath(fileRoot, '/test-folder-1/');
|
||||
await fsPromises.mkdir(containerFile, { recursive: true });
|
||||
const testFile = joinFilePath(containerFile, '/test.txt');
|
||||
await fsPromises.writeFile(testFile, 'A'.repeat(20));
|
||||
|
||||
const fileSize = fileSizeReporter.getSize({ path: testFile });
|
||||
const containerSize = fileSizeReporter.getSize({ path: containerFile });
|
||||
|
||||
await expect(fileSize).resolves.toEqual(expect.objectContaining({ amount: 20 }));
|
||||
await expect(containerSize).resolves.toEqual(expect.objectContaining({ amount: 20 + folderSize }));
|
||||
});
|
||||
|
||||
it('should not count files located in an ignored folder.', async(): Promise<void> => {
|
||||
const containerFile = joinFilePath(fileRoot, '/test-folder-2/');
|
||||
await fsPromises.mkdir(containerFile, { recursive: true });
|
||||
const testFile = joinFilePath(containerFile, '/test.txt');
|
||||
await fsPromises.writeFile(testFile, 'A'.repeat(20));
|
||||
|
||||
const internalContainerFile = joinFilePath(fileRoot, '/.internal/');
|
||||
await fsPromises.mkdir(internalContainerFile, { recursive: true });
|
||||
const internalTestFile = joinFilePath(internalContainerFile, '/test.txt');
|
||||
await fsPromises.writeFile(internalTestFile, 'A'.repeat(30));
|
||||
|
||||
const fileSize = fileSizeReporter.getSize({ path: testFile });
|
||||
const containerSize = fileSizeReporter.getSize({ path: containerFile });
|
||||
const rootSize = fileSizeReporter.getSize({ path: fileRoot });
|
||||
|
||||
const expectedFileSize = 20;
|
||||
const expectedContainerSize = 20 + folderSize;
|
||||
const expectedRootSize = expectedContainerSize + folderSize;
|
||||
|
||||
await expect(fileSize).resolves.toEqual(expect.objectContaining({ amount: expectedFileSize }));
|
||||
await expect(containerSize).resolves.toEqual(expect.objectContaining({ amount: expectedContainerSize }));
|
||||
await expect(rootSize).resolves.toEqual(expect.objectContaining({ amount: expectedRootSize }));
|
||||
});
|
||||
|
||||
it('should have the unit in its return value.', async(): Promise<void> => {
|
||||
const testFile = joinFilePath(fileRoot, '/test2.txt');
|
||||
await fsPromises.writeFile(testFile, 'A'.repeat(20));
|
||||
|
||||
const result = fileSizeReporter.getSize({ path: testFile });
|
||||
await expect(result).resolves.toBeDefined();
|
||||
expect((await result).unit).toBe(UNIT_BYTES);
|
||||
});
|
||||
|
||||
it('getUnit() should return UNIT_BYTES.', (): void => {
|
||||
expect(fileSizeReporter.getUnit()).toBe(UNIT_BYTES);
|
||||
});
|
||||
|
||||
it('should return 0 when the size of a non existent file is requested.', async(): Promise<void> => {
|
||||
const result = fileSizeReporter.getSize({ path: joinFilePath(fileRoot, '/test.txt') });
|
||||
await expect(result).resolves.toEqual(expect.objectContaining({ amount: 0 }));
|
||||
});
|
||||
|
||||
it('should calculate the chunk size correctly.', async(): Promise<void> => {
|
||||
const testString = 'testesttesttesttest==testtest';
|
||||
const result = fileSizeReporter.calculateChunkSize(testString);
|
||||
await expect(result).resolves.toEqual(testString.length);
|
||||
});
|
||||
|
||||
describe('estimateSize()', (): void => {
|
||||
it('should return the content-length.', async(): Promise<void> => {
|
||||
const metadata = new RepresentationMetadata();
|
||||
metadata.contentLength = 100;
|
||||
await expect(fileSizeReporter.estimateSize(metadata)).resolves.toEqual(100);
|
||||
});
|
||||
it(
|
||||
'should return undefined if no content-length is present in the metadata.',
|
||||
async(): Promise<void> => {
|
||||
const metadata = new RepresentationMetadata();
|
||||
await expect(fileSizeReporter.estimateSize(metadata)).resolves.toBeUndefined();
|
||||
},
|
||||
);
|
||||
});
|
||||
});
test/unit/storage/validators/QuotaValidator.test.ts (new file, 120 lines)
@@ -0,0 +1,120 @@
|
||||
import type { Readable } from 'stream';
|
||||
import { PassThrough } from 'stream';
|
||||
import type { ValidatorInput } from '../../../../src/http/auxiliary/Validator';
|
||||
import { BasicRepresentation } from '../../../../src/http/representation/BasicRepresentation';
|
||||
import { RepresentationMetadata } from '../../../../src/http/representation/RepresentationMetadata';
|
||||
import type { ResourceIdentifier } from '../../../../src/http/representation/ResourceIdentifier';
|
||||
import type { QuotaStrategy } from '../../../../src/storage/quota/QuotaStrategy';
|
||||
import { UNIT_BYTES } from '../../../../src/storage/size-reporter/Size';
|
||||
import type { SizeReporter } from '../../../../src/storage/size-reporter/SizeReporter';
|
||||
import { QuotaValidator } from '../../../../src/storage/validators/QuotaValidator';
|
||||
import { guardStream } from '../../../../src/util/GuardedStream';
|
||||
import type { Guarded } from '../../../../src/util/GuardedStream';
|
||||
import { guardedStreamFrom, readableToString } from '../../../../src/util/StreamUtil';
|
||||
|
||||
describe('QuotaValidator', (): void => {
|
||||
let mockedStrategy: jest.Mocked<QuotaStrategy>;
|
||||
let validator: QuotaValidator;
|
||||
let identifier: ResourceIdentifier;
|
||||
let mockMetadata: RepresentationMetadata;
|
||||
let mockData: Guarded<Readable>;
|
||||
let mockInput: ValidatorInput;
|
||||
let mockReporter: jest.Mocked<SizeReporter<any>>;
|
||||
|
||||
beforeEach((): void => {
|
||||
jest.clearAllMocks();
|
||||
identifier = { path: 'http://localhost/' };
|
||||
mockMetadata = new RepresentationMetadata();
|
||||
mockData = guardedStreamFrom([ 'test string' ]);
|
||||
mockInput = {
|
||||
representation: new BasicRepresentation(mockData, mockMetadata),
|
||||
identifier,
|
||||
};
|
||||
mockReporter = {
|
||||
getSize: jest.fn(),
|
||||
getUnit: jest.fn(),
|
||||
calculateChunkSize: jest.fn(),
|
||||
estimateSize: jest.fn().mockResolvedValue(8),
|
||||
};
|
||||
mockedStrategy = {
|
||||
reporter: mockReporter,
|
||||
limit: { unit: UNIT_BYTES, amount: 8 },
|
||||
getAvailableSpace: jest.fn().mockResolvedValue({ unit: UNIT_BYTES, amount: 10 }),
|
||||
estimateSize: jest.fn().mockResolvedValue({ unit: UNIT_BYTES, amount: 8 }),
|
||||
createQuotaGuard: jest.fn().mockResolvedValue(guardStream(new PassThrough())),
|
||||
} as any;
|
||||
validator = new QuotaValidator(mockedStrategy);
|
||||
});
|
||||
|
||||
describe('handle()', (): void => {
|
||||
// Step 2
|
||||
it('should destroy the stream when estimated size is larger than the available size.', async(): Promise<void> => {
|
||||
mockedStrategy.estimateSize.mockResolvedValueOnce({ unit: UNIT_BYTES, amount: 11 });
|
||||
|
||||
const result = validator.handle(mockInput);
|
||||
await expect(result).resolves.toBeDefined();
|
||||
const awaitedResult = await result;
|
||||
|
||||
const prom = new Promise<void>((resolve, reject): void => {
|
||||
awaitedResult.data.on('error', (): void => resolve());
|
||||
awaitedResult.data.on('end', (): void => reject(new Error('reject')));
|
||||
});
|
||||
|
||||
// Consume the stream
|
||||
await expect(readableToString(awaitedResult.data))
|
||||
.rejects.toThrow('Quota exceeded: Advertised Content-Length is');
|
||||
await expect(prom).resolves.toBeUndefined();
|
||||
});
|
||||
|
||||
// Step 3
|
||||
it('should destroy the stream when quota is exceeded during write.', async(): Promise<void> => {
|
||||
mockedStrategy.createQuotaGuard.mockResolvedValueOnce(guardStream(new PassThrough({
|
||||
async transform(this): Promise<void> {
|
||||
this.destroy(new Error('error'));
|
||||
},
|
||||
})));
|
||||
|
||||
const result = validator.handle(mockInput);
|
||||
await expect(result).resolves.toBeDefined();
|
||||
const awaitedResult = await result;
|
||||
|
||||
const prom = new Promise<void>((resolve, reject): void => {
|
||||
awaitedResult.data.on('error', (): void => resolve());
|
||||
awaitedResult.data.on('end', (): void => reject(new Error('reject')));
|
||||
});
|
||||
|
||||
// Consume the stream
|
||||
await expect(readableToString(awaitedResult.data)).rejects.toThrow('error');
|
||||
expect(mockedStrategy.createQuotaGuard).toHaveBeenCalledTimes(1);
|
||||
await expect(prom).resolves.toBeUndefined();
|
||||
});
|
||||
|
||||
// Step 4
|
||||
it('should throw when the quota was exceeded after the stream was finished.', async(): Promise<void> => {
|
||||
const result = validator.handle(mockInput);
|
||||
|
||||
// Putting this after the handle / before consuming the stream will only affect
// this function in the flush part of the code.
|
||||
mockedStrategy.getAvailableSpace.mockResolvedValueOnce({ unit: UNIT_BYTES, amount: -100 });
|
||||
|
||||
await expect(result).resolves.toBeDefined();
|
||||
const awaitedResult = await result;
|
||||
|
||||
const prom = new Promise<void>((resolve, reject): void => {
|
||||
awaitedResult.data.on('error', (): void => resolve());
|
||||
awaitedResult.data.on('end', (): void => reject(new Error('reject')));
|
||||
});
|
||||
|
||||
// Consume the stream
|
||||
await expect(readableToString(awaitedResult.data)).rejects.toThrow('Quota exceeded after write completed');
|
||||
await expect(prom).resolves.toBeUndefined();
|
||||
});
|
||||
|
||||
it('should return a stream that is consumable without error if quota isn\'t exceeded.', async(): Promise<void> => {
|
||||
const result = validator.handle(mockInput);
|
||||
await expect(result).resolves.toBeDefined();
|
||||
const awaitedResult = await result;
|
||||
await expect(readableToString(awaitedResult.data)).resolves.toBe('test string');
|
||||
});
|
||||
});
|
||||
});
|
||||
@@ -7,6 +7,7 @@ import { InternalServerError } from '../../../../src/util/errors/InternalServerE
|
||||
import { MethodNotAllowedHttpError } from '../../../../src/util/errors/MethodNotAllowedHttpError';
|
||||
import { NotFoundHttpError } from '../../../../src/util/errors/NotFoundHttpError';
|
||||
import { NotImplementedHttpError } from '../../../../src/util/errors/NotImplementedHttpError';
|
||||
import { PayloadHttpError } from '../../../../src/util/errors/PayloadHttpError';
|
||||
import { PreconditionFailedHttpError } from '../../../../src/util/errors/PreconditionFailedHttpError';
|
||||
import { UnauthorizedHttpError } from '../../../../src/util/errors/UnauthorizedHttpError';
|
||||
import { UnsupportedMediaTypeHttpError } from '../../../../src/util/errors/UnsupportedMediaTypeHttpError';
|
||||
@@ -27,6 +28,7 @@ describe('HttpError', (): void => {
|
||||
[ 'MethodNotAllowedHttpError', 405, MethodNotAllowedHttpError ],
|
||||
[ 'ConflictHttpError', 409, ConflictHttpError ],
|
||||
[ 'PreconditionFailedHttpError', 412, PreconditionFailedHttpError ],
|
||||
[ 'PayloadHttpError', 413, PayloadHttpError ],
|
||||
[ 'UnsupportedMediaTypeHttpError', 415, UnsupportedMediaTypeHttpError ],
|
||||
[ 'InternalServerError', 500, InternalServerError ],
|
||||
[ 'NotImplementedHttpError', 501, NotImplementedHttpError ],
|
||||
|
||||
@@ -19,6 +19,8 @@ const portNames = [
|
||||
'SparqlStorage',
|
||||
'Subdomains',
|
||||
'WebSocketsProtocol',
|
||||
'PodQuota',
|
||||
'GlobalQuota',
|
||||
// Unit
|
||||
'BaseHttpServerFactory',
|
||||
] as const;
|
||||
@@ -122,7 +124,7 @@ export function mockFs(rootFilepath?: string, time?: Date): { data: any } {
|
||||
isFile: (): boolean => typeof folder[name] === 'string',
|
||||
isDirectory: (): boolean => typeof folder[name] === 'object',
|
||||
isSymbolicLink: (): boolean => typeof folder[name] === 'symbol',
|
||||
size: typeof folder[name] === 'string' ? folder[name].length : 0,
|
||||
size: typeof folder[name] === 'string' ? folder[name].length : 4,
|
||||
mtime: time,
|
||||
} as Stats;
|
||||
},
|
||||
@@ -199,6 +201,21 @@ export function mockFs(rootFilepath?: string, time?: Date): { data: any } {
|
||||
const { folder, name } = getFolder(path);
|
||||
folder[name] = data;
|
||||
},
|
||||
async rename(path: string, destination: string): Promise<void> {
|
||||
const { folder, name } = getFolder(path);
|
||||
if (!folder[name]) {
|
||||
throwSystemError('ENOENT');
|
||||
}
|
||||
if (!(await this.lstat(path)).isFile()) {
|
||||
throwSystemError('EISDIR');
|
||||
}
|
||||
|
||||
const { folder: folderDest, name: nameDest } = getFolder(destination);
|
||||
folderDest[nameDest] = folder[name];
|
||||
|
||||
// eslint-disable-next-line @typescript-eslint/no-dynamic-delete
|
||||
delete folder[name];
|
||||
},
|
||||
},
|
||||
};