feat: Use rdf-vocabulary library

Author: Joachim Van Herwegen, 2024-09-06 09:17:29 +02:00
parent 58574eec07
commit 6d5f56cfe4
15 changed files with 136 additions and 239 deletions
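
The commit replaces the local createVocabulary implementation in src/util/Vocabularies.ts with the one exported by the rdf-vocabulary package. A minimal sketch of the pattern the code base relies on, based on the call sites visible in this diff; the example namespace and local names are made up:

import type { NamedNode } from '@rdfjs/types';
import { createVocabulary } from 'rdf-vocabulary';

// Each local name is exposed both as a plain URI string and,
// under `terms`, as a NamedNode.
const EX = createVocabulary(
  'http://example.org/ns#',
  'Thing',
  'label',
);

const uri: string = EX.Thing;           // 'http://example.org/ns#Thing'
const term: NamedNode = EX.terms.label; // NamedNode for 'http://example.org/ns#label'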

package-lock.json (generated)

@@ -70,6 +70,7 @@
     "rdf-string": "^1.6.3",
     "rdf-terms": "^1.11.0",
     "rdf-validate-shacl": "^0.4.5",
+    "rdf-vocabulary": "^1.0.0",
     "sparqlalgebrajs": "^4.3.0",
     "sparqljs": "^3.7.1",
     "url-join": "^4.0.1",
@@ -1123,7 +1124,7 @@
     },
     "node_modules/@clack/prompts/node_modules/is-unicode-supported": {
       "version": "1.3.0",
-      "extraneous": true,
+      "dev": true,
       "inBundle": true,
       "license": "MIT",
       "engines": {
@@ -19708,6 +19709,14 @@
         "rdf-validate-datatype": "^0.1.5"
       }
     },
+    "node_modules/rdf-vocabulary": {
+      "version": "1.0.0",
+      "resolved": "https://registry.npmjs.org/rdf-vocabulary/-/rdf-vocabulary-1.0.0.tgz",
+      "integrity": "sha512-aDMeOiyr7ZUx9UjjgQogXW5e+KfJSxa+HML5BIXE2lQiXp+qn9FggfnejBkcnqUCoypFtjDvejZ3UN3gGkurTw==",
+      "dependencies": {
+        "@rdfjs/types": "^1.1.0"
+      }
+    },
     "node_modules/rdfa-streaming-parser": {
       "version": "2.0.1",
       "resolved": "https://registry.npmjs.org/rdfa-streaming-parser/-/rdfa-streaming-parser-2.0.1.tgz",
@@ -22578,7 +22587,7 @@
         "is-unicode-supported": {
           "version": "1.3.0",
           "bundled": true,
-          "extraneous": true
+          "dev": true
         }
       }
     },
@@ -38082,6 +38091,14 @@
       "rdf-validate-datatype": "^0.1.5"
     }
   },
+  "rdf-vocabulary": {
+    "version": "1.0.0",
+    "resolved": "https://registry.npmjs.org/rdf-vocabulary/-/rdf-vocabulary-1.0.0.tgz",
+    "integrity": "sha512-aDMeOiyr7ZUx9UjjgQogXW5e+KfJSxa+HML5BIXE2lQiXp+qn9FggfnejBkcnqUCoypFtjDvejZ3UN3gGkurTw==",
+    "requires": {
+      "@rdfjs/types": "^1.1.0"
+    }
+  },
   "rdfa-streaming-parser": {
     "version": "2.0.1",
     "resolved": "https://registry.npmjs.org/rdfa-streaming-parser/-/rdfa-streaming-parser-2.0.1.tgz",


@@ -133,6 +133,7 @@
     "rdf-string": "^1.6.3",
     "rdf-terms": "^1.11.0",
     "rdf-validate-shacl": "^0.4.5",
+    "rdf-vocabulary": "^1.0.0",
     "sparqlalgebrajs": "^4.3.0",
     "sparqljs": "^3.7.1",
     "url-join": "^4.0.1",


@@ -1,4 +1,4 @@
-import { createVocabulary } from '../../../util/Vocabularies';
+import { createVocabulary } from 'rdf-vocabulary';
 export const TEMPLATE = createVocabulary(
   'urn:solid-server:template:',


@@ -1,8 +1,9 @@
+import type { VocabularyTerm, VocabularyValue } from 'rdf-vocabulary';
 import type { RepresentationMetadata } from '../../http/representation/RepresentationMetadata';
 import type { ResourceIdentifier } from '../../http/representation/ResourceIdentifier';
 import type { GenericEventEmitter } from '../../util/GenericEventEmitter';
 import { createGenericEventEmitterClass } from '../../util/GenericEventEmitter';
-import type { AS, VocabularyTerm, VocabularyValue } from '../../util/Vocabularies';
+import type { AS } from '../../util/Vocabularies';
 /**
  * An event emitter used to report changes made to resources.


@@ -1,9 +1,10 @@
 import { getLoggerFor } from 'global-logger-factory';
 import { StaticHandler } from 'asynchronous-handlers';
+import type { VocabularyTerm } from 'rdf-vocabulary';
 import type { RepresentationMetadata } from '../../http/representation/RepresentationMetadata';
 import type { ResourceIdentifier } from '../../http/representation/ResourceIdentifier';
 import { createErrorMessage } from '../../util/errors/ErrorUtil';
-import type { AS, VocabularyTerm } from '../../util/Vocabularies';
+import type { AS } from '../../util/Vocabularies';
 import type { ActivityEmitter } from './ActivityEmitter';
 import type { NotificationChannelStorage } from './NotificationChannelStorage';
 import type { NotificationHandler } from './NotificationHandler';


@@ -1,7 +1,8 @@
 import { AsyncHandler } from 'asynchronous-handlers';
+import type { VocabularyTerm } from 'rdf-vocabulary';
 import type { RepresentationMetadata } from '../../http/representation/RepresentationMetadata';
 import type { ResourceIdentifier } from '../../http/representation/ResourceIdentifier';
-import type { AS, VocabularyTerm } from '../../util/Vocabularies';
+import type { AS } from '../../util/Vocabularies';
 import type { NotificationChannel } from './NotificationChannel';
 export interface NotificationHandlerInput {


@@ -1,9 +1,10 @@
 import { getLoggerFor } from 'global-logger-factory';
 import { StaticHandler } from 'asynchronous-handlers';
+import type { VocabularyTerm } from 'rdf-vocabulary';
 import type { RepresentationMetadata } from '../../../http/representation/RepresentationMetadata';
 import type { ResourceIdentifier } from '../../../http/representation/ResourceIdentifier';
 import { createErrorMessage } from '../../../util/errors/ErrorUtil';
-import type { AS, VocabularyTerm } from '../../../util/Vocabularies';
+import type { AS } from '../../../util/Vocabularies';
 import type { ActivityEmitter } from '../ActivityEmitter';
 import type { NotificationHandler } from '../NotificationHandler';
 import { generateChannel } from './StreamingHttp2023Util';


@@ -1,107 +1,4 @@
-import { DataFactory } from 'n3';
-import type { NamedNode } from '@rdfjs/types';
-/**
- * A `Record` in which each value is a concatenation of the baseUrl and its key.
- */
-type ExpandedRecord<TBase extends string, TLocal extends string> = {[K in TLocal]: `${TBase}${K}` };
-/**
- * Has a base URL as `namespace` value and each key has as value the concatenation with that base URL.
- */
-type ValueVocabulary<TBase extends string, TLocal extends string> =
-  { namespace: TBase } & ExpandedRecord<TBase, TLocal>;
-/**
- * A {@link ValueVocabulary} where the URI values are {@link NamedNode}s.
- */
-type TermVocabulary<T> = T extends ValueVocabulary<string, string> ? {[K in keyof T]: NamedNode<T[K]> } : never;
-/**
- * Contains a namespace and keys linking to the entries in this namespace.
- * The `terms` field contains the same values but as {@link NamedNode} instead of string.
- */
-export type Vocabulary<TBase extends string, TKey extends string> =
-  ValueVocabulary<TBase, TKey> & { terms: TermVocabulary<ValueVocabulary<TBase, TKey>> };
-/**
- * A {@link Vocabulary} where all the non-namespace fields are of unknown value.
- * This is a fallback in case {@link createVocabulary} gets called with a non-strict string array.
- */
-export type PartialVocabulary<TBase extends string> =
-  { namespace: TBase } &
-  Partial<Record<string, string>> &
-  { terms: { namespace: NamedNode<TBase> } & Partial<Record<string, NamedNode>> };
-/**
- * A local name of a {@link Vocabulary}.
- */
-export type VocabularyLocal<T> = T extends Vocabulary<string, infer TKey> ? TKey : never;
-/**
- * A URI string entry of a {@link Vocabulary}.
- */
-export type VocabularyValue<T> = T extends Vocabulary<string, infer TKey> ? T[TKey] : never;
-/**
- * A {@link NamedNode} entry of a {@link Vocabulary}.
- */
-export type VocabularyTerm<T> = T extends Vocabulary<string, infer TKey> ? T['terms'][TKey] : never;
-/**
- * Creates a {@link ValueVocabulary} with the given `baseUri` as namespace and all `localNames` as entries.
- */
-function createValueVocabulary<TBase extends string, TLocal extends string>(baseUri: TBase, localNames: TLocal[]):
-ValueVocabulary<TBase, TLocal> {
-  const expanded: Partial<ExpandedRecord<TBase, TLocal>> = {};
-  // Expose the listed local names as properties
-  for (const localName of localNames) {
-    expanded[localName] = `${baseUri}${localName}`;
-  }
-  return {
-    namespace: baseUri,
-    ...expanded as ExpandedRecord<TBase, TLocal>,
-  };
-}
-/**
- * Creates a {@link TermVocabulary} based on the provided {@link ValueVocabulary}.
- */
-function createTermVocabulary<TBase extends string, TLocal extends string>(values: ValueVocabulary<TBase, TLocal>):
-TermVocabulary<ValueVocabulary<TBase, TLocal>> {
-  // Need to cast since `fromEntries` typings aren't strict enough
-  return Object.fromEntries(
-    Object.entries(values).map(([ key, value ]): [string, NamedNode] => [ key, DataFactory.namedNode(value) ]),
-  ) as TermVocabulary<ValueVocabulary<TBase, TLocal>>;
-}
-/**
- * Creates a {@link Vocabulary} with the given `baseUri` as namespace and all `localNames` as entries.
- * The values are the local names expanded from the given base URI as strings.
- * The `terms` field contains all the same values but as {@link NamedNode} instead.
- */
-export function createVocabulary<TBase extends string, TLocal extends string>(baseUri: TBase, ...localNames: TLocal[]):
-string extends TLocal ? PartialVocabulary<TBase> : Vocabulary<TBase, TLocal> {
-  const values = createValueVocabulary(baseUri, localNames);
-  return {
-    ...values,
-    terms: createTermVocabulary(values),
-  };
-}
-/**
- * Creates a new {@link Vocabulary} that extends an existing one by adding new local names.
- *
- * @param vocabulary - The {@link Vocabulary} to extend.
- * @param newNames - The new local names that need to be added.
- */
-export function extendVocabulary<TBase extends string, TLocal extends string, TNew extends string>(
-  vocabulary: Vocabulary<TBase, TLocal>,
-  ...newNames: TNew[]
-):
-ReturnType<typeof createVocabulary<TBase, TLocal | TNew>> {
-  const localNames = Object.keys(vocabulary)
-    .filter((key): boolean => key !== 'terms' && key !== 'namespace') as TLocal[];
-  const allNames = [ ...localNames, ...newNames ];
-  return createVocabulary(vocabulary.namespace, ...allNames);
-}
+import { createVocabulary } from 'rdf-vocabulary';
 export const ACL = createVocabulary(
   'http://www.w3.org/ns/auth/acl#',
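
The VocabularyTerm and VocabularyValue helper types deleted above are now imported from rdf-vocabulary instead, as the changed imports in the notification components show. A small sketch of how such a type constrains a parameter to a single vocabulary, assuming the AS vocabulary that this file keeps exporting; the function and import path are illustrative:

import type { VocabularyTerm } from 'rdf-vocabulary';
import { AS } from '../../util/Vocabularies';

// Accepts only NamedNodes that belong to the AS vocabulary,
// such as AS.terms.Create or AS.terms.Delete.
function describeActivity(activity: VocabularyTerm<typeof AS>): string {
  return `Activity of type ${activity.value}`;
}

describeActivity(AS.terms.Update);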


@@ -1,3 +1,4 @@
+import 'jest-rdf';
 import { DataFactory, Parser, Store } from 'n3';
 import { BasicRepresentation } from '../../src/http/representation/BasicRepresentation';
 import type { App } from '../../src/init/App';
@@ -108,8 +109,8 @@ describe.each(stores)('A server supporting StreamingHTTPChannel2023 using %s', (
       try {
         const quads = await readChunk(reader);
-        expect(quads.getObjects(null, RDF.terms.type, null)).toEqual([ AS.terms.Update ]);
-        expect(quads.getObjects(null, AS.terms.object, null)).toEqual([ namedNode(topic) ]);
+        expect(quads.getObjects(null, RDF.terms.type, null)).toEqualRdfTermArray([ AS.terms.Update ]);
+        expect(quads.getObjects(null, AS.terms.object, null)).toEqualRdfTermArray([ namedNode(topic) ]);
       } finally {
         reader.releaseLock();
         await streamingResponse.body!.cancel();
@@ -125,8 +126,8 @@ describe.each(stores)('A server supporting StreamingHTTPChannel2023 using %s', (
       try {
         const quads = await readChunk(reader);
-        expect(quads.getObjects(null, RDF.terms.type, null)).toEqual([ AS.terms.Delete ]);
-        expect(quads.getObjects(null, AS.terms.object, null)).toEqual([ namedNode(topic) ]);
+        expect(quads.getObjects(null, RDF.terms.type, null)).toEqualRdfTermArray([ AS.terms.Delete ]);
+        expect(quads.getObjects(null, AS.terms.object, null)).toEqualRdfTermArray([ namedNode(topic) ]);
       } finally {
         reader.releaseLock();
         await streamingResponse.body!.cancel();
@@ -144,13 +145,13 @@ describe.each(stores)('A server supporting StreamingHTTPChannel2023 using %s', (
       try {
         // Expected initial notification
         const updateQuads = await readChunk(reader);
-        expect(updateQuads.getObjects(null, RDF.terms.type, null)).toEqual([ AS.terms.Update ]);
-        expect(updateQuads.getObjects(null, AS.terms.object, null)).toEqual([ namedNode(topic) ]);
+        expect(updateQuads.getObjects(null, RDF.terms.type, null)).toEqualRdfTermArray([ AS.terms.Update ]);
+        expect(updateQuads.getObjects(null, AS.terms.object, null)).toEqualRdfTermArray([ namedNode(topic) ]);
         // Expected initial notification on other receiver
         const otherQuads = await readChunk(otherReader);
-        expect(otherQuads.getObjects(null, RDF.terms.type, null)).toEqual([ AS.terms.Update ]);
-        expect(otherQuads.getObjects(null, AS.terms.object, null)).toEqual([ namedNode(topic) ]);
+        expect(otherQuads.getObjects(null, RDF.terms.type, null)).toEqualRdfTermArray([ AS.terms.Update ]);
+        expect(otherQuads.getObjects(null, AS.terms.object, null)).toEqualRdfTermArray([ namedNode(topic) ]);
         // Delete resource
         const response = await fetch(topic, {
@@ -160,8 +161,8 @@ describe.each(stores)('A server supporting StreamingHTTPChannel2023 using %s', (
         // If it was caused by the other receiver connecting, it would have been Update as well
         const deleteQuads = await readChunk(reader);
-        expect(deleteQuads.getObjects(null, RDF.terms.type, null)).toEqual([ AS.terms.Delete ]);
-        expect(deleteQuads.getObjects(null, AS.terms.object, null)).toEqual([ namedNode(topic) ]);
+        expect(deleteQuads.getObjects(null, RDF.terms.type, null)).toEqualRdfTermArray([ AS.terms.Delete ]);
+        expect(deleteQuads.getObjects(null, AS.terms.object, null)).toEqualRdfTermArray([ namedNode(topic) ]);
       } finally {
         reader.releaseLock();
         await streamingResponse.body!.cancel();
@@ -190,8 +191,8 @@ describe.each(stores)('A server supporting StreamingHTTPChannel2023 using %s', (
         expect(response.status).toBe(201);
         const quads = await readChunk(reader);
-        expect(quads.getObjects(null, RDF.terms.type, null)).toEqual([ AS.terms.Create ]);
-        expect(quads.getObjects(null, AS.terms.object, null)).toEqual([ namedNode(topic) ]);
+        expect(quads.getObjects(null, RDF.terms.type, null)).toEqualRdfTermArray([ AS.terms.Create ]);
+        expect(quads.getObjects(null, AS.terms.object, null)).toEqualRdfTermArray([ namedNode(topic) ]);
       } finally {
         reader.releaseLock();
         await streamingResponse.body!.cancel();
@@ -216,8 +217,8 @@ describe.each(stores)('A server supporting StreamingHTTPChannel2023 using %s', (
         expect(response.status).toBe(205);
         const quads = await readChunk(reader);
-        expect(quads.getObjects(null, RDF.terms.type, null)).toEqual([ AS.terms.Update ]);
-        expect(quads.getObjects(null, AS.terms.object, null)).toEqual([ namedNode(topic) ]);
+        expect(quads.getObjects(null, RDF.terms.type, null)).toEqualRdfTermArray([ AS.terms.Update ]);
+        expect(quads.getObjects(null, AS.terms.object, null)).toEqualRdfTermArray([ namedNode(topic) ]);
       } finally {
         reader.releaseLock();
         await streamingResponse.body!.cancel();
@@ -240,8 +241,8 @@ describe.each(stores)('A server supporting StreamingHTTPChannel2023 using %s', (
         expect(response.status).toBe(205);
         const quads = await readChunk(reader);
-        expect(quads.getObjects(null, RDF.terms.type, null)).toEqual([ AS.terms.Delete ]);
-        expect(quads.getObjects(null, AS.terms.object, null)).toEqual([ namedNode(topic) ]);
+        expect(quads.getObjects(null, RDF.terms.type, null)).toEqualRdfTermArray([ AS.terms.Delete ]);
+        expect(quads.getObjects(null, AS.terms.object, null)).toEqualRdfTermArray([ namedNode(topic) ]);
       } finally {
         reader.releaseLock();
         await streamingResponse.body!.cancel();
@@ -310,9 +311,9 @@ describe.each(stores)('A server supporting StreamingHTTPChannel2023 using %s', (
         // Will receive the Add notification
         const addQuads = await readChunk(reader);
-        expect(addQuads.getObjects(null, RDF.terms.type, null)).toEqual([ AS.terms.Add ]);
-        expect(addQuads.getObjects(null, AS.terms.object, null)).toEqual([ namedNode(resource) ]);
-        expect(addQuads.getObjects(null, AS.terms.target, null)).toEqual([ namedNode(baseUrl) ]);
+        expect(addQuads.getObjects(null, RDF.terms.type, null)).toEqualRdfTermArray([ AS.terms.Add ]);
+        expect(addQuads.getObjects(null, AS.terms.object, null)).toEqualRdfTermArray([ namedNode(resource) ]);
+        expect(addQuads.getObjects(null, AS.terms.target, null)).toEqualRdfTermArray([ namedNode(baseUrl) ]);
         // Remove contained resource
         const removeResponse = await fetch(resource, {
@@ -322,9 +323,9 @@ describe.each(stores)('A server supporting StreamingHTTPChannel2023 using %s', (
         // Will receive the Remove notification
         const removeQuads = await readChunk(reader);
-        expect(removeQuads.getObjects(null, RDF.terms.type, null)).toEqual([ AS.terms.Remove ]);
-        expect(removeQuads.getObjects(null, AS.terms.object, null)).toEqual([ namedNode(resource) ]);
-        expect(removeQuads.getObjects(null, AS.terms.target, null)).toEqual([ namedNode(baseUrl) ]);
+        expect(removeQuads.getObjects(null, RDF.terms.type, null)).toEqualRdfTermArray([ AS.terms.Remove ]);
+        expect(removeQuads.getObjects(null, AS.terms.object, null)).toEqualRdfTermArray([ namedNode(resource) ]);
+        expect(removeQuads.getObjects(null, AS.terms.target, null)).toEqualRdfTermArray([ namedNode(baseUrl) ]);
       } finally {
         reader.releaseLock();
         await streamingResponse.body!.cancel();
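
The toEqual assertions above are replaced with matchers from jest-rdf, most likely because the vocabulary terms are now produced by rdf-vocabulary's data factory rather than n3's, so strict structural equality between otherwise identical NamedNodes can fail. A minimal sketch of the difference, assuming the AS vocabulary export used elsewhere in this test suite:

import 'jest-rdf';
import { DataFactory } from 'n3';
import { AS } from '../../src/util/Vocabularies';

it('treats equivalent terms from different factories as equal.', (): void => {
  const update = DataFactory.namedNode(AS.Update);
  // `toEqual` compares internal object structure, which differs between factories;
  // `toEqualRdfTerm` compares RDF term equality (same termType and value).
  expect(update).toEqualRdfTerm(AS.terms.Update);
});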


@@ -1,3 +1,4 @@
+import 'jest-rdf';
 import { fetch } from 'cross-fetch';
 import type { NamedNode } from 'n3';
 import { DataFactory, Parser, Store } from 'n3';
@@ -304,8 +305,8 @@ describe.each(stores)('A server supporting WebSocketChannel2023 using %s', (name
     const parser = new Parser({ baseIRI: subscriptionUrl });
     const quads = new Store(parser.parse(await response.text()));
-    expect(quads.getObjects(null, RDF.terms.type, null)).toEqual([ NOTIFY.terms.WebSocketChannel2023 ]);
-    expect(quads.getObjects(null, NOTIFY.terms.topic, null)).toEqual([ namedNode(topic) ]);
+    expect(quads.getObjects(null, RDF.terms.type, null)).toEqualRdfTermArray([ NOTIFY.terms.WebSocketChannel2023 ]);
+    expect(quads.getObjects(null, NOTIFY.terms.topic, null)).toEqualRdfTermArray([ namedNode(topic) ]);
     expect(quads.countQuads(null, NOTIFY.terms.receiveFrom, null, null)).toBe(1);
   });


@@ -1,3 +1,4 @@
+import 'jest-rdf';
 import { DataFactory, Parser } from 'n3';
 import type { Operation } from '../../../../../src/http/Operation';
 import { BasicRepresentation } from '../../../../../src/http/representation/BasicRepresentation';
@@ -34,7 +35,7 @@ describe('A WebhookWebId', (): void => {
     expect(turtle.data).toBeDefined();
     const quads = new Parser({ baseIRI: operation.target.path }).parse(await readableToString(turtle.data!));
     expect(quads).toHaveLength(1);
-    expect(quads).toEqual([ quad(
+    expect(quads).toEqualRdfQuadArray([ quad(
       namedNode('http://example.com/.notifications/webhooks/webid'),
       SOLID.terms.oidcIssuer,
       namedNode('http://example.com'),


@@ -265,7 +265,7 @@ describe('A DataAccessorBasedStore', (): void => {
     representation.metadata.add(RDF.terms.type, LDP.terms.Container);
     const result = await store.addResource(resourceID, representation);
     expect(result.size).toBe(2);
-    expect(result.get(resourceID)?.get(SOLID_AS.terms.activity)).toEqual(AS.terms.Add);
+    expect(result.get(resourceID)?.get(SOLID_AS.terms.activity)).toEqualRdfTerm(AS.terms.Add);
     const generatedID = [ ...result.keys() ].find((id): boolean => id.path !== resourceID.path)!;
     expect(generatedID).toBeDefined();
@@ -278,7 +278,7 @@
     const result = await store.addResource(resourceID, representation);
     expect(result.size).toBe(2);
-    expect(result.get(resourceID)?.get(SOLID_AS.terms.activity)).toEqual(AS.terms.Add);
+    expect(result.get(resourceID)?.get(SOLID_AS.terms.activity)).toEqualRdfTerm(AS.terms.Add);
     const generatedID = [ ...result.keys() ].find((id): boolean => id.path !== resourceID.path)!;
     expect(generatedID).toBeDefined();
@@ -287,7 +287,7 @@
     expect(accessor.data[generatedID.path]).toBeDefined();
     await expect(arrayifyStream(accessor.data[generatedID.path].data)).resolves.toEqual([ resourceData ]);
     expect(accessor.data[generatedID.path].metadata.get(DC.terms.modified)?.value).toBe(now.toISOString());
-    expect(result.get(generatedID)?.get(SOLID_AS.terms.activity)).toEqual(AS.terms.Create);
+    expect(result.get(generatedID)?.get(SOLID_AS.terms.activity)).toEqualRdfTerm(AS.terms.Create);
     expect(result.get(resourceID)?.get(AS.terms.object)?.value).toEqual(generatedID.path);
   });
@@ -298,7 +298,7 @@
     const result = await store.addResource(resourceID, representation);
     expect(result.size).toBe(2);
-    expect(result.get(resourceID)?.get(SOLID_AS.terms.activity)).toEqual(AS.terms.Add);
+    expect(result.get(resourceID)?.get(SOLID_AS.terms.activity)).toEqualRdfTerm(AS.terms.Add);
     const generatedID = [ ...result.keys() ].find((id): boolean => id.path !== resourceID.path)!;
     expect(generatedID).toBeDefined();
@@ -306,7 +306,7 @@
     expect(accessor.data[generatedID.path]).toBeDefined();
     expect(accessor.data[generatedID.path].metadata.contentType).toBeUndefined();
-    expect(result.get(generatedID)?.get(SOLID_AS.terms.activity)).toEqual(AS.terms.Create);
+    expect(result.get(generatedID)?.get(SOLID_AS.terms.activity)).toEqualRdfTerm(AS.terms.Create);
     const { metadata } = await store.getRepresentation(generatedID);
     expect(metadata.get(DC.terms.modified)?.value).toBe(now.toISOString());
@@ -319,8 +319,8 @@
     const result = await store.addResource(resourceID, representation);
     expect(result.size).toBe(2);
-    expect(result.get(resourceID)?.get(SOLID_AS.terms.activity)).toEqual(AS.terms.Add);
-    expect(result.get({ path: `${root}newName` })?.get(SOLID_AS.terms.activity)).toEqual(AS.terms.Create);
+    expect(result.get(resourceID)?.get(SOLID_AS.terms.activity)).toEqualRdfTerm(AS.terms.Add);
+    expect(result.get({ path: `${root}newName` })?.get(SOLID_AS.terms.activity)).toEqualRdfTerm(AS.terms.Create);
   });
   it('errors on a slug ending on / without Link rel:type Container header.', async(): Promise<void> => {
@@ -344,8 +344,9 @@
     const result = await store.addResource(resourceID, representation);
     expect(result.size).toBe(2);
-    expect(result.get(resourceID)?.get(SOLID_AS.terms.activity)).toEqual(AS.terms.Add);
-    expect(result.get({ path: `${root}newContainer/` })?.get(SOLID_AS.terms.activity)).toEqual(AS.terms.Create);
+    expect(result.get(resourceID)?.get(SOLID_AS.terms.activity)).toEqualRdfTerm(AS.terms.Add);
+    expect(result.get({ path: `${root}newContainer/` })?.get(SOLID_AS.terms.activity))
+      .toEqualRdfTerm(AS.terms.Create);
   });
   it('generates a new URI if adding the slug would create an existing URI.', async(): Promise<void> => {
@@ -368,8 +369,8 @@
     const result = await store.addResource(resourceID, representation);
     expect(result.size).toBe(2);
-    expect(result.get(resourceID)?.get(SOLID_AS.terms.activity)).toEqual(AS.terms.Add);
-    expect(result.get({ path: `${root}%26%26` })?.get(SOLID_AS.terms.activity)).toEqual(AS.terms.Create);
+    expect(result.get(resourceID)?.get(SOLID_AS.terms.activity)).toEqualRdfTerm(AS.terms.Add);
+    expect(result.get({ path: `${root}%26%26` })?.get(SOLID_AS.terms.activity)).toEqualRdfTerm(AS.terms.Create);
   });
   it('errors if the slug contains a slash.', async(): Promise<void> => {
@@ -434,7 +435,7 @@
     const result = await store.setRepresentation(resourceID, representation);
     expect(result.size).toBe(1);
-    expect(result.get(resourceID)?.get(SOLID_AS.terms.activity)).toEqual(AS.terms.Create);
+    expect(result.get(resourceID)?.get(SOLID_AS.terms.activity)).toEqualRdfTerm(AS.terms.Create);
     expect(mock).toHaveBeenCalledTimes(1);
     expect(mock).toHaveBeenLastCalledWith(resourceID);
@@ -459,9 +460,9 @@
     const resourceID = { path: `${root}resource` };
     const result = await store.setRepresentation(resourceID, representation);
     expect(result.size).toBe(2);
-    expect(result.get({ path: root })?.get(SOLID_AS.terms.activity)).toEqual(AS.terms.Add);
+    expect(result.get({ path: root })?.get(SOLID_AS.terms.activity)).toEqualRdfTerm(AS.terms.Add);
     expect(result.get({ path: root })?.get(AS.terms.object)?.value).toEqual(resourceID.path);
-    expect(result.get(resourceID)?.get(SOLID_AS.terms.activity)).toEqual(AS.terms.Create);
+    expect(result.get(resourceID)?.get(SOLID_AS.terms.activity)).toEqualRdfTerm(AS.terms.Create);
     await expect(arrayifyStream(accessor.data[resourceID.path].data)).resolves.toEqual([ resourceData ]);
     expect(accessor.data[resourceID.path].metadata.get(DC.terms.modified)?.value).toBe(now.toISOString());
     expect(accessor.data[root].metadata.get(DC.terms.modified)?.value).toBe(now.toISOString());
@@ -477,8 +478,8 @@
     representation.data = guardedStreamFrom([ `<${root}resource/> a <coolContainer>.` ]);
     const result = await store.setRepresentation(resourceID, representation);
     expect(result.size).toBe(2);
-    expect(result.get({ path: root })?.get(SOLID_AS.terms.activity)).toEqual(AS.terms.Add);
-    expect(result.get(resourceID)?.get(SOLID_AS.terms.activity)).toEqual(AS.terms.Create);
+    expect(result.get({ path: root })?.get(SOLID_AS.terms.activity)).toEqualRdfTerm(AS.terms.Add);
+    expect(result.get(resourceID)?.get(SOLID_AS.terms.activity)).toEqualRdfTerm(AS.terms.Create);
     expect(accessor.data[resourceID.path]).toBeTruthy();
     expect(accessor.data[resourceID.path].metadata.contentType).toBeUndefined();
     expect(accessor.data[resourceID.path].metadata.get(DC.terms.modified)?.value).toBe(now.toISOString());
@@ -490,8 +491,8 @@
     const resourceID = { path: `${root}resource` };
     const result = await store.setRepresentation(resourceID, representation);
     expect(result.size).toBe(2);
-    expect(result.get({ path: root })?.get(SOLID_AS.terms.activity)).toEqual(AS.terms.Add);
-    expect(result.get(resourceID)?.get(SOLID_AS.terms.activity)).toEqual(AS.terms.Create);
+    expect(result.get({ path: root })?.get(SOLID_AS.terms.activity)).toEqualRdfTerm(AS.terms.Add);
+    expect(result.get(resourceID)?.get(SOLID_AS.terms.activity)).toEqualRdfTerm(AS.terms.Create);
     await expect(arrayifyStream(accessor.data[resourceID.path].data)).resolves.toEqual([ resourceData ]);
     expect(accessor.data[resourceID.path].metadata.get(DC.terms.modified)?.value).toBe(now.toISOString());
     expect(accessor.data[root].metadata.get(DC.terms.modified)?.value).toBe(now.toISOString());
@@ -501,7 +502,7 @@
     mockDate.mockReturnValue(later);
     const result2 = await store.setRepresentation(resourceID, representation);
     expect(result2.size).toBe(1);
-    expect(result2.get(resourceID)?.get(SOLID_AS.terms.activity)).toEqual(AS.terms.Update);
+    expect(result2.get(resourceID)?.get(SOLID_AS.terms.activity)).toEqualRdfTerm(AS.terms.Update);
     await expect(arrayifyStream(accessor.data[resourceID.path].data)).resolves.toEqual([ 'updatedText' ]);
     expect(accessor.data[resourceID.path].metadata.get(DC.terms.modified)?.value).toBe(later.toISOString());
     expect(accessor.data[root].metadata.get(DC.terms.modified)?.value).toBe(now.toISOString());
@@ -514,8 +515,8 @@
     representation.metadata.add(namedNode('gen'), 'value', SOLID_META.terms.ResponseMetadata);
     const result = await store.setRepresentation(resourceID, representation);
     expect(result.size).toBe(2);
-    expect(result.get({ path: root })?.get(SOLID_AS.terms.activity)).toEqual(AS.terms.Add);
-    expect(result.get(resourceID)?.get(SOLID_AS.terms.activity)).toEqual(AS.terms.Create);
+    expect(result.get({ path: root })?.get(SOLID_AS.terms.activity)).toEqualRdfTerm(AS.terms.Add);
+    expect(result.get(resourceID)?.get(SOLID_AS.terms.activity)).toEqualRdfTerm(AS.terms.Create);
     await expect(arrayifyStream(accessor.data[resourceID.path].data)).resolves.toEqual([ resourceData ]);
     expect(accessor.data[resourceID.path].metadata.get(namedNode('notGen'))?.value).toBe('value');
     expect(accessor.data[resourceID.path].metadata.get(namedNode('gen'))).toBeUndefined();
@@ -526,8 +527,8 @@
     const resourceID = { path: `${root}resource` };
     const result = await store.setRepresentation(resourceID, representation);
     expect(result.size).toBe(2);
-    expect(result.get({ path: root })?.get(SOLID_AS.terms.activity)).toEqual(AS.terms.Create);
-    expect(result.get(resourceID)?.get(SOLID_AS.terms.activity)).toEqual(AS.terms.Create);
+    expect(result.get({ path: root })?.get(SOLID_AS.terms.activity)).toEqualRdfTerm(AS.terms.Create);
+    expect(result.get(resourceID)?.get(SOLID_AS.terms.activity)).toEqualRdfTerm(AS.terms.Create);
     await expect(arrayifyStream(accessor.data[resourceID.path].data)).resolves.toEqual([ resourceData ]);
   });
@@ -535,10 +536,10 @@
     const resourceID = { path: `${root}a/b/resource` };
     const result = await store.setRepresentation(resourceID, representation);
     expect(result.size).toBe(4);
-    expect(result.get({ path: root })?.get(SOLID_AS.terms.activity)).toEqual(AS.terms.Add);
-    expect(result.get({ path: `${root}a/` })?.get(SOLID_AS.terms.activity)).toEqual(AS.terms.Create);
-    expect(result.get({ path: `${root}a/b/` })?.get(SOLID_AS.terms.activity)).toEqual(AS.terms.Create);
-    expect(result.get({ path: `${root}a/b/resource` })?.get(SOLID_AS.terms.activity)).toEqual(AS.terms.Create);
+    expect(result.get({ path: root })?.get(SOLID_AS.terms.activity)).toEqualRdfTerm(AS.terms.Add);
+    expect(result.get({ path: `${root}a/` })?.get(SOLID_AS.terms.activity)).toEqualRdfTerm(AS.terms.Create);
+    expect(result.get({ path: `${root}a/b/` })?.get(SOLID_AS.terms.activity)).toEqualRdfTerm(AS.terms.Create);
+    expect(result.get({ path: `${root}a/b/resource` })?.get(SOLID_AS.terms.activity)).toEqualRdfTerm(AS.terms.Create);
     await expect(arrayifyStream(accessor.data[resourceID.path].data)).resolves.toEqual([ resourceData ]);
     expect(accessor.data[`${root}a/`].metadata.getAll(RDF.terms.type).map((type): string => type.value))
       .toContain(LDP.Container);
@@ -564,7 +565,7 @@
     representation.data = guardedStreamFrom([]);
     const result = await store.setRepresentation(resourceID, representation);
     expect(result.size).toBe(1);
-    expect(result.get(resourceID)?.get(SOLID_AS.terms.activity)).toEqual(AS.terms.Create);
+    expect(result.get(resourceID)?.get(SOLID_AS.terms.activity)).toEqualRdfTerm(AS.terms.Create);
     expect(accessor.data[resourceID.path]).toBeTruthy();
     expect(Object.keys(accessor.data)).toHaveLength(1);
     expect(accessor.data[resourceID.path].metadata.contentType).toBeUndefined();
@@ -582,7 +583,7 @@
     ) ], resourceID);
     const result = await store.setRepresentation(metaResourceID, metaRepresentation);
-    expect(result.get(resourceID)?.get(SOLID_AS.terms.activity)).toEqual(AS.terms.Update);
+    expect(result.get(resourceID)?.get(SOLID_AS.terms.activity)).toEqualRdfTerm(AS.terms.Update);
     expect(accessor.data[resourceID.path].metadata.quads()).toBeRdfIsomorphic([
       quad(
         namedNode(resourceID.path),
@@ -605,7 +606,7 @@
     const metaRepresentation = new BasicRepresentation(guardedStreamFrom(quads), resourceID, INTERNAL_QUADS);
     const result = await store.setRepresentation(metaResourceID, metaRepresentation);
-    expect(result.get(resourceID)?.get(SOLID_AS.terms.activity)).toEqual(AS.terms.Update);
+    expect(result.get(resourceID)?.get(SOLID_AS.terms.activity)).toEqualRdfTerm(AS.terms.Update);
     expect(accessor.data[resourceID.path].metadata.quads()).toBeRdfIsomorphic(quads);
   });
@@ -767,9 +768,9 @@
     accessor.data[resourceID.path] = representation;
     const result = await store.deleteResource(resourceID);
     expect(result.size).toBe(2);
-    expect(result.get({ path: root })?.get(SOLID_AS.terms.activity)).toEqual(AS.terms.Remove);
+    expect(result.get({ path: root })?.get(SOLID_AS.terms.activity)).toEqualRdfTerm(AS.terms.Remove);
     expect(result.get({ path: root })?.get(AS.terms.object)?.value).toEqual(resourceID.path);
-    expect(result.get(resourceID)?.get(SOLID_AS.terms.activity)).toEqual(AS.terms.Delete);
+    expect(result.get(resourceID)?.get(SOLID_AS.terms.activity)).toEqualRdfTerm(AS.terms.Delete);
     expect(accessor.data[resourceID.path]).toBeUndefined();
     expect(accessor.data[root].metadata.get(DC.terms.modified)?.value).toBe(now.toISOString());
     expect(accessor.data[root].metadata.get(GENERATED_PREDICATE)).toBeUndefined();
@@ -779,7 +780,7 @@
     accessor.data[root] = new BasicRepresentation(representation.data, containerMetadata);
     const result = await store.deleteResource({ path: root });
     expect(result.size).toBe(1);
-    expect(result.get({ path: root })?.get(SOLID_AS.terms.activity)).toEqual(AS.terms.Delete);
+    expect(result.get({ path: root })?.get(SOLID_AS.terms.activity)).toEqualRdfTerm(AS.terms.Delete);
     expect(accessor.data[root]).toBeUndefined();
   });
@@ -792,8 +793,8 @@
     jest.spyOn(auxiliaryStrategy, 'isRequiredInRoot').mockReturnValue(true);
     const result = await store.deleteResource(auxResourceID);
     expect(result.size).toBe(2);
-    expect(result.get(resourceID)?.get(SOLID_AS.terms.activity)).toEqual(AS.terms.Remove);
-    expect(result.get(auxResourceID)?.get(SOLID_AS.terms.activity)).toEqual(AS.terms.Delete);
+    expect(result.get(resourceID)?.get(SOLID_AS.terms.activity)).toEqualRdfTerm(AS.terms.Remove);
+    expect(result.get(auxResourceID)?.get(SOLID_AS.terms.activity)).toEqualRdfTerm(AS.terms.Delete);
     expect(accessor.data[auxResourceID.path]).toBeUndefined();
   });
@@ -805,9 +806,9 @@
     const result = await store.deleteResource(resourceID);
     expect(result.size).toBe(3);
-    expect(result.get({ path: root })?.get(SOLID_AS.terms.activity)).toEqual(AS.terms.Remove);
-    expect(result.get(resourceID)?.get(SOLID_AS.terms.activity)).toEqual(AS.terms.Delete);
-    expect(result.get(auxResourceID)?.get(SOLID_AS.terms.activity)).toEqual(AS.terms.Delete);
+    expect(result.get({ path: root })?.get(SOLID_AS.terms.activity)).toEqualRdfTerm(AS.terms.Remove);
+    expect(result.get(resourceID)?.get(SOLID_AS.terms.activity)).toEqualRdfTerm(AS.terms.Delete);
+    expect(result.get(auxResourceID)?.get(SOLID_AS.terms.activity)).toEqualRdfTerm(AS.terms.Delete);
     expect(accessor.data[resourceID.path]).toBeUndefined();
     expect(accessor.data[auxResourceID.path]).toBeUndefined();
   });
@@ -829,8 +830,8 @@
     jest.spyOn(logger, 'error').mockImplementation();
     const result = await store.deleteResource(resourceID);
     expect(result.size).toBe(2);
-    expect(result.get({ path: root })?.get(SOLID_AS.terms.activity)).toEqual(AS.terms.Remove);
-    expect(result.get(resourceID)?.get(SOLID_AS.terms.activity)).toEqual(AS.terms.Delete);
+    expect(result.get({ path: root })?.get(SOLID_AS.terms.activity)).toEqualRdfTerm(AS.terms.Remove);
+    expect(result.get(resourceID)?.get(SOLID_AS.terms.activity)).toEqualRdfTerm(AS.terms.Delete);
     expect(accessor.data[resourceID.path]).toBeUndefined();
     expect(accessor.data[auxResourceID.path]).toBeDefined();
     expect(logger.error).toHaveBeenCalledTimes(1);


@@ -14,10 +14,10 @@ describe('A MonitoringStore', (): void => {
   const idNew = { path: 'http://example.org/foo/bar/new' };
   const idOld = { path: 'http://example.org/foo/bar/old' };
-  let changedCallback: () => void;
-  let createdCallback: () => void;
-  let updatedCallback: () => void;
-  let deletedCallback: () => void;
+  let changedCallback: jest.Mock;
+  let createdCallback: jest.Mock;
+  let updatedCallback: jest.Mock;
+  let deletedCallback: jest.Mock;
   const addResourceReturnMock: ChangeMap = new IdentifierMap([
     [ idNew, new RepresentationMetadata({ [SOLID_AS.activity]: AS.terms.Create }) ],
@@ -81,8 +81,14 @@
     expect(changedCallback).toHaveBeenCalledTimes(0);
     await result;
     expect(changedCallback).toHaveBeenCalledTimes(2);
-    expect(changedCallback).toHaveBeenCalledWith(id, AS.terms.Update, addResourceReturnMock.get(id));
-    expect(changedCallback).toHaveBeenCalledWith(idNew, AS.terms.Create, addResourceReturnMock.get(idNew));
+    expect(changedCallback).toHaveBeenCalledWith(id, expect.objectContaining({
+      termType: 'NamedNode',
+      value: AS.Update,
+    }), addResourceReturnMock.get(id));
+    expect(changedCallback).toHaveBeenCalledWith(idNew, expect.objectContaining({
+      termType: 'NamedNode',
+      value: AS.Create,
+    }), addResourceReturnMock.get(idNew));
     expect(createdCallback).toHaveBeenCalledTimes(1);
     expect(createdCallback).toHaveBeenCalledWith(idNew, addResourceReturnMock.get(idNew));
     expect(updatedCallback).toHaveBeenCalledTimes(1);
@@ -102,7 +108,10 @@
     expect(changedCallback).toHaveBeenCalledTimes(0);
     await result;
     expect(changedCallback).toHaveBeenCalledTimes(1);
-    expect(changedCallback).toHaveBeenCalledWith(idNew, AS.terms.Update, setRepresentationReturnMock.get(idNew));
+    expect(changedCallback).toHaveBeenCalledWith(idNew, expect.objectContaining({
+      termType: 'NamedNode',
+      value: AS.Update,
+    }), setRepresentationReturnMock.get(idNew));
     expect(createdCallback).toHaveBeenCalledTimes(0);
     expect(updatedCallback).toHaveBeenCalledTimes(1);
     expect(updatedCallback).toHaveBeenCalledWith(idNew, setRepresentationReturnMock.get(idNew));
@@ -121,8 +130,14 @@
     expect(changedCallback).toHaveBeenCalledTimes(0);
     await result;
     expect(changedCallback).toHaveBeenCalledTimes(2);
-    expect(changedCallback).toHaveBeenCalledWith(id, AS.terms.Update, deleteResourceReturnMock.get(id));
-    expect(changedCallback).toHaveBeenCalledWith(idNew, AS.terms.Delete, deleteResourceReturnMock.get(idNew));
+    expect(changedCallback).toHaveBeenCalledWith(id, expect.objectContaining({
+      termType: 'NamedNode',
+      value: AS.Update,
+    }), deleteResourceReturnMock.get(id));
+    expect(changedCallback).toHaveBeenCalledWith(idNew, expect.objectContaining({
+      termType: 'NamedNode',
+      value: AS.Delete,
+    }), deleteResourceReturnMock.get(idNew));
     expect(createdCallback).toHaveBeenCalledTimes(0);
     expect(updatedCallback).toHaveBeenCalledTimes(1);
     expect(updatedCallback).toHaveBeenCalledWith(id, deleteResourceReturnMock.get(id));
@@ -142,9 +157,18 @@
     expect(changedCallback).toHaveBeenCalledTimes(0);
     await result;
     expect(changedCallback).toHaveBeenCalledTimes(3);
-    expect(changedCallback).toHaveBeenCalledWith(idNew, AS.terms.Create, modifyResourceReturnMock.get(idNew));
-    expect(changedCallback).toHaveBeenCalledWith(id, AS.terms.Update, modifyResourceReturnMock.get(id));
-    expect(changedCallback).toHaveBeenCalledWith(idOld, AS.terms.Delete, modifyResourceReturnMock.get(idOld));
+    expect(changedCallback).toHaveBeenCalledWith(id, expect.objectContaining({
+      termType: 'NamedNode',
+      value: AS.Update,
+    }), modifyResourceReturnMock.get(id));
+    expect(changedCallback).toHaveBeenCalledWith(idNew, expect.objectContaining({
+      termType: 'NamedNode',
+      value: AS.Create,
+    }), modifyResourceReturnMock.get(idNew));
+    expect(changedCallback).toHaveBeenCalledWith(idOld, expect.objectContaining({
+      termType: 'NamedNode',
+      value: AS.Delete,
+    }), modifyResourceReturnMock.get(idOld));
     expect(createdCallback).toHaveBeenCalledTimes(1);
     expect(createdCallback).toHaveBeenCalledWith(idNew, modifyResourceReturnMock.get(idNew));
     expect(updatedCallback).toHaveBeenCalledTimes(1);
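
Inside toHaveBeenCalledWith these assertions do not use a jest-rdf matcher, presumably because the library does not offer an asymmetric matcher for that position; instead they match the activity argument structurally on its termType and value, which accepts a NamedNode from any data factory. A self-contained sketch of that pattern; the callback and import path are illustrative:

import { AS } from '../../../src/util/Vocabularies';

it('matches activity terms by termType and value.', (): void => {
  const changedCallback = jest.fn();
  changedCallback({ path: 'http://example.org/foo' }, AS.terms.Update);
  expect(changedCallback).toHaveBeenCalledWith(
    { path: 'http://example.org/foo' },
    // Only the listed fields are checked, so any NamedNode identifying as:Update matches.
    expect.objectContaining({ termType: 'NamedNode', value: AS.Update }),
  );
});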


@@ -114,7 +114,7 @@ describe('An InMemoryDataAccessor', (): void => {
     expect(metadata.identifier.value).toBe(`${base}resource`);
     const quads = metadata.quads();
     expect(quads).toHaveLength(3);
-    expect(metadata.get(RDF.terms.type)).toEqual(LDP.terms.Resource);
+    expect(metadata.get(RDF.terms.type)).toEqualRdfTerm(LDP.terms.Resource);
     expect(metadata.contentType).toBe('text/turtle');
     expect(metadata.get(POSIX.terms.size)?.value).toBe('4');
   });


@@ -1,50 +0,0 @@
-import { DataFactory } from 'n3';
-import { createVocabulary, extendVocabulary } from '../../../src/util/Vocabularies';
-describe('Vocabularies', (): void => {
-  const vocabulary = createVocabulary('http://www.w3.org/ns/ldp#', 'contains', 'Container');
-  describe('createVocabulary', (): void => {
-    it('contains its own URI.', (): void => {
-      expect(vocabulary.namespace).toBe('http://www.w3.org/ns/ldp#');
-    });
-    it('contains its own URI as a term.', (): void => {
-      expect(vocabulary.terms.namespace).toEqual(DataFactory.namedNode('http://www.w3.org/ns/ldp#'));
-    });
-    it('exposes the defined URIs.', (): void => {
-      expect(vocabulary.contains).toBe('http://www.w3.org/ns/ldp#contains');
-      expect(vocabulary.Container).toBe('http://www.w3.org/ns/ldp#Container');
-    });
-    it('exposes the defined URIs as terms.', (): void => {
-      expect(vocabulary.terms.contains).toEqual(DataFactory.namedNode('http://www.w3.org/ns/ldp#contains'));
-      expect(vocabulary.terms.Container).toEqual(DataFactory.namedNode('http://www.w3.org/ns/ldp#Container'));
-    });
-  });
-  describe('extendVocabulary', (): void => {
-    const extended = extendVocabulary(vocabulary, 'extended', 'extra');
-    it('still contains all the original values.', async(): Promise<void> => {
-      expect(extended.namespace).toBe('http://www.w3.org/ns/ldp#');
-      expect(extended.terms.namespace).toEqual(DataFactory.namedNode('http://www.w3.org/ns/ldp#'));
-      expect(extended.contains).toBe('http://www.w3.org/ns/ldp#contains');
-      expect(extended.Container).toBe('http://www.w3.org/ns/ldp#Container');
-      expect(extended.terms.contains).toEqual(DataFactory.namedNode('http://www.w3.org/ns/ldp#contains'));
-      expect(extended.terms.Container).toEqual(DataFactory.namedNode('http://www.w3.org/ns/ldp#Container'));
-    });
-    it('contains the new values.', async(): Promise<void> => {
-      expect(extended.extended).toBe('http://www.w3.org/ns/ldp#extended');
-      expect(extended.extra).toBe('http://www.w3.org/ns/ldp#extra');
-      expect(extended.terms.extended).toEqual(DataFactory.namedNode('http://www.w3.org/ns/ldp#extended'));
-      expect(extended.terms.extra).toEqual(DataFactory.namedNode('http://www.w3.org/ns/ldp#extra'));
-    });
-    it('does not modify the original vocabulary.', async(): Promise<void> => {
-      expect((vocabulary as any).extended).toBeUndefined();
-    });
-  });
-});