refactor: Also create named nodes for vocabularies.

This commit is contained in:
Ruben Verborgh 2021-01-02 22:58:35 +01:00
parent 8e138c36d1
commit ae06e99067
14 changed files with 128 additions and 88 deletions

View File

@ -8,7 +8,6 @@ import { NotFoundHttpError } from '../util/errors/NotFoundHttpError';
import { ensureTrailingSlash } from '../util/PathUtil'; import { ensureTrailingSlash } from '../util/PathUtil';
import { generateResourceQuads } from '../util/ResourceUtil'; import { generateResourceQuads } from '../util/ResourceUtil';
import { guardedStreamFrom } from '../util/StreamUtil'; import { guardedStreamFrom } from '../util/StreamUtil';
import { toCachedNamedNode } from '../util/UriUtil';
import { PIM, RDF } from '../util/Vocabularies'; import { PIM, RDF } from '../util/Vocabularies';
import { Initializer } from './Initializer'; import { Initializer } from './Initializer';
import namedNode = DataFactory.namedNode; import namedNode = DataFactory.namedNode;
@ -60,7 +59,7 @@ export class RootContainerInitializer extends Initializer {
// Make sure the root container is a pim:Storage // Make sure the root container is a pim:Storage
// This prevents deletion of the root container as storage root containers can not be deleted // This prevents deletion of the root container as storage root containers can not be deleted
metadata.add(RDF.type, toCachedNamedNode(PIM.Storage)); metadata.add(RDF.type, PIM.terms.Storage);
metadata.contentType = TEXT_TURTLE; metadata.contentType = TEXT_TURTLE;

View File

@ -2,6 +2,7 @@ import { DataFactory, Store } from 'n3';
import type { BlankNode, Literal, NamedNode, Quad, Term } from 'rdf-js'; import type { BlankNode, Literal, NamedNode, Quad, Term } from 'rdf-js';
import { getLoggerFor } from '../../logging/LogUtil'; import { getLoggerFor } from '../../logging/LogUtil';
import { toObjectTerm, toCachedNamedNode, isTerm } from '../../util/UriUtil'; import { toObjectTerm, toCachedNamedNode, isTerm } from '../../util/UriUtil';
import { CONTENT_TYPE_TERM } from '../../util/Vocabularies';
import type { ResourceIdentifier } from './ResourceIdentifier'; import type { ResourceIdentifier } from './ResourceIdentifier';
import { isResourceIdentifier } from './ResourceIdentifier'; import { isResourceIdentifier } from './ResourceIdentifier';
@ -223,10 +224,10 @@ export class RepresentationMetadata {
* Shorthand for the CONTENT_TYPE predicate. * Shorthand for the CONTENT_TYPE predicate.
*/ */
public get contentType(): string | undefined { public get contentType(): string | undefined {
return this.get(toCachedNamedNode('contentType'))?.value; return this.get(CONTENT_TYPE_TERM)?.value;
} }
public set contentType(input) { public set contentType(input) {
this.set(toCachedNamedNode('contentType'), input); this.set(CONTENT_TYPE_TERM, input);
} }
} }

View File

@ -16,8 +16,8 @@ import type { Guarded } from '../../util/GuardedStream';
import { isContainerIdentifier } from '../../util/PathUtil'; import { isContainerIdentifier } from '../../util/PathUtil';
import { parseQuads, pushQuad, serializeQuads } from '../../util/QuadUtil'; import { parseQuads, pushQuad, serializeQuads } from '../../util/QuadUtil';
import { generateContainmentQuads, generateResourceQuads } from '../../util/ResourceUtil'; import { generateContainmentQuads, generateResourceQuads } from '../../util/ResourceUtil';
import { toCachedNamedNode, toLiteral } from '../../util/UriUtil'; import { toLiteral } from '../../util/UriUtil';
import { CONTENT_TYPE, DCTERMS, LDP, POSIX, RDF, XSD } from '../../util/Vocabularies'; import { CONTENT_TYPE, DC, LDP, POSIX, RDF, XSD } from '../../util/Vocabularies';
import type { FileIdentifierMapper, ResourceLink } from '../mapping/FileIdentifierMapper'; import type { FileIdentifierMapper, ResourceLink } from '../mapping/FileIdentifierMapper';
import type { DataAccessor } from './DataAccessor'; import type { DataAccessor } from './DataAccessor';
@ -210,9 +210,9 @@ export class FileDataAccessor implements DataAccessor {
*/ */
private async writeMetadata(link: ResourceLink, metadata: RepresentationMetadata): Promise<boolean> { private async writeMetadata(link: ResourceLink, metadata: RepresentationMetadata): Promise<boolean> {
// These are stored by file system conventions // These are stored by file system conventions
metadata.remove(RDF.type, toCachedNamedNode(LDP.Resource)); metadata.remove(RDF.type, LDP.terms.Resource);
metadata.remove(RDF.type, toCachedNamedNode(LDP.Container)); metadata.remove(RDF.type, LDP.terms.Container);
metadata.remove(RDF.type, toCachedNamedNode(LDP.BasicContainer)); metadata.remove(RDF.type, LDP.terms.BasicContainer);
metadata.removeAll(CONTENT_TYPE); metadata.removeAll(CONTENT_TYPE);
const quads = metadata.quads(); const quads = metadata.quads();
const metadataLink = await this.getMetadataLink(link.identifier); const metadataLink = await this.getMetadataLink(link.identifier);
@ -329,10 +329,10 @@ export class FileDataAccessor implements DataAccessor {
*/ */
private generatePosixQuads(subject: NamedNode, stats: Stats): Quad[] { private generatePosixQuads(subject: NamedNode, stats: Stats): Quad[] {
const quads: Quad[] = []; const quads: Quad[] = [];
pushQuad(quads, subject, toCachedNamedNode(POSIX.size), toLiteral(stats.size, XSD.integer)); pushQuad(quads, subject, POSIX.terms.size, toLiteral(stats.size, XSD.terms.integer));
pushQuad(quads, subject, toCachedNamedNode(DCTERMS.modified), toLiteral(stats.mtime.toISOString(), XSD.dateTime)); pushQuad(quads, subject, DC.terms.modified, toLiteral(stats.mtime.toISOString(), XSD.terms.dateTime));
pushQuad(quads, subject, toCachedNamedNode(POSIX.mtime), toLiteral( pushQuad(quads, subject, POSIX.terms.mtime, toLiteral(
Math.floor(stats.mtime.getTime() / 1000), XSD.integer, Math.floor(stats.mtime.getTime() / 1000), XSD.terms.integer,
)); ));
return quads; return quads;
} }

View File

@ -27,7 +27,6 @@ import { guardStream } from '../../util/GuardedStream';
import type { Guarded } from '../../util/GuardedStream'; import type { Guarded } from '../../util/GuardedStream';
import type { IdentifierStrategy } from '../../util/identifiers/IdentifierStrategy'; import type { IdentifierStrategy } from '../../util/identifiers/IdentifierStrategy';
import { isContainerIdentifier } from '../../util/PathUtil'; import { isContainerIdentifier } from '../../util/PathUtil';
import { toCachedNamedNode } from '../../util/UriUtil';
import { CONTENT_TYPE, LDP } from '../../util/Vocabularies'; import { CONTENT_TYPE, LDP } from '../../util/Vocabularies';
import type { DataAccessor } from './DataAccessor'; import type { DataAccessor } from './DataAccessor';
@ -226,7 +225,7 @@ export class SparqlDataAccessor implements DataAccessor {
// Insert new metadata and containment triple // Insert new metadata and containment triple
const insert: GraphQuads[] = [ this.sparqlUpdateGraph(metaName, metadata.quads()) ]; const insert: GraphQuads[] = [ this.sparqlUpdateGraph(metaName, metadata.quads()) ];
if (parent) { if (parent) {
insert.push(this.sparqlUpdateGraph(parent, [ quad(parent, toCachedNamedNode(LDP.contains), name) ])); insert.push(this.sparqlUpdateGraph(parent, [ quad(parent, LDP.terms.contains, name) ]));
} }
// Necessary updates: delete metadata and insert new data // Necessary updates: delete metadata and insert new data
@ -272,7 +271,7 @@ export class SparqlDataAccessor implements DataAccessor {
if (parent) { if (parent) {
update.updates.push({ update.updates.push({
updateType: 'delete', updateType: 'delete',
delete: [ this.sparqlUpdateGraph(parent, [ quad(parent, toCachedNamedNode(LDP.contains), name) ]) ], delete: [ this.sparqlUpdateGraph(parent, [ quad(parent, LDP.terms.contains, name) ]) ],
}); });
} }

View File

@ -2,7 +2,6 @@ import { DataFactory } from 'n3';
import type { NamedNode, Quad } from 'rdf-js'; import type { NamedNode, Quad } from 'rdf-js';
import { RepresentationMetadata } from '../ldp/representation/RepresentationMetadata'; import { RepresentationMetadata } from '../ldp/representation/RepresentationMetadata';
import { pushQuad } from './QuadUtil'; import { pushQuad } from './QuadUtil';
import { toCachedNamedNode } from './UriUtil';
import { LDP, RDF } from './Vocabularies'; import { LDP, RDF } from './Vocabularies';
/** /**
@ -15,10 +14,10 @@ import { LDP, RDF } from './Vocabularies';
export const generateResourceQuads = (subject: NamedNode, isContainer: boolean): Quad[] => { export const generateResourceQuads = (subject: NamedNode, isContainer: boolean): Quad[] => {
const quads: Quad[] = []; const quads: Quad[] = [];
if (isContainer) { if (isContainer) {
pushQuad(quads, subject, toCachedNamedNode(RDF.type), toCachedNamedNode(LDP.Container)); pushQuad(quads, subject, RDF.terms.type, LDP.terms.Container);
pushQuad(quads, subject, toCachedNamedNode(RDF.type), toCachedNamedNode(LDP.BasicContainer)); pushQuad(quads, subject, RDF.terms.type, LDP.terms.BasicContainer);
} }
pushQuad(quads, subject, toCachedNamedNode(RDF.type), toCachedNamedNode(LDP.Resource)); pushQuad(quads, subject, RDF.terms.type, LDP.terms.Resource);
return quads; return quads;
}; };

View File

@ -1,12 +1,12 @@
import { DataFactory } from 'n3'; import { DataFactory } from 'n3';
import type { Literal, NamedNode, Term } from 'rdf-js'; import type { Literal, NamedNode, Term } from 'rdf-js';
import { CONTENT_TYPE } from './Vocabularies'; import { CONTENT_TYPE_TERM } from './Vocabularies';
const { namedNode, literal } = DataFactory; const { namedNode, literal } = DataFactory;
// Shorthands for commonly used predicates // Shorthands for commonly used predicates
const shorthands: Record<string, NamedNode> = { const shorthands: Record<string, NamedNode> = {
contentType: DataFactory.namedNode(CONTENT_TYPE), contentType: CONTENT_TYPE_TERM,
}; };
// Caches named node conversions // Caches named node conversions
@ -63,5 +63,5 @@ export const toObjectTerm = <T extends Term>(object: T | string, preferLiteral =
* @param object - Object value. * @param object - Object value.
* @param dataType - Object data type (as string). * @param dataType - Object data type (as string).
*/ */
export const toLiteral = (object: string | number, dataType: string | NamedNode): Literal => export const toLiteral = (object: string | number, dataType: NamedNode): Literal =>
DataFactory.literal(object, toCachedNamedNode(dataType)); DataFactory.literal(object, toCachedNamedNode(dataType));

View File

@ -1,4 +1,6 @@
/* eslint-disable @typescript-eslint/naming-convention, function-paren-newline */ /* eslint-disable @typescript-eslint/naming-convention, function-paren-newline */
import { namedNode } from '@rdfjs/data-model';
import type { NamedNode } from 'rdf-js';
type PrefixResolver<T> = (localName: string) => T; type PrefixResolver<T> = (localName: string) => T;
type RecordOf<TKey extends any[], TValue> = Record<TKey[number], TValue>; type RecordOf<TKey extends any[], TValue> = Record<TKey[number], TValue>;
@ -10,25 +12,54 @@ export type Namespace<TKey extends any[], TValue> =
* Creates a function that expands local names from the given base URI, * Creates a function that expands local names from the given base URI,
* and exports the given local names as properties on the returned object. * and exports the given local names as properties on the returned object.
*/ */
export const createNamespace = <T extends string>(baseUri: string, ...localNames: T[]): export const createNamespace = <TKey extends string, TValue>(
Namespace<typeof localNames, string> => { baseUri: string,
toValue: (expanded: string) => TValue,
...localNames: TKey[]):
Namespace<typeof localNames, TValue> => {
// Create a function that expands local names // Create a function that expands local names
const expanded = {} as Record<string, string>; const expanded = {} as Record<string, TValue>;
const namespace = ((localName: string): string => { const namespace = ((localName: string): TValue => {
if (!(localName in expanded)) { if (!(localName in expanded)) {
expanded[localName] = `${baseUri}${localName}`; expanded[localName] = toValue(`${baseUri}${localName}`);
} }
return expanded[localName]; return expanded[localName];
}) as Namespace<typeof localNames, string>; }) as Namespace<typeof localNames, TValue>;
// Expose the listed local names as properties // Expose the listed local names as properties
for (const localName of localNames) { for (const localName of localNames) {
(namespace as RecordOf<typeof localNames, string>)[localName] = namespace(localName); (namespace as RecordOf<typeof localNames, TValue>)[localName] = namespace(localName);
} }
return namespace; return namespace;
}; };
export const ACL = createNamespace('http://www.w3.org/ns/auth/acl#', /**
* Creates a function that expands local names from the given base URI into strings,
* and exports the given local names as properties on the returned object.
*/
export const createUriNamespace = <T extends string>(baseUri: string, ...localNames: T[]):
Namespace<typeof localNames, string> =>
createNamespace(baseUri, (expanded): string => expanded, ...localNames);
/**
* Creates a function that expands local names from the given base URI into named nodes,
* and exports the given local names as properties on the returned object.
*/
export const createTermNamespace = <T extends string>(baseUri: string, ...localNames: T[]):
Namespace<typeof localNames, NamedNode> =>
createNamespace(baseUri, namedNode, ...localNames);
/**
 * Creates a function that expands local names from the given base URI into strings,
* and exports the given local names as properties on the returned object.
* Under the `terms` property, it exposes the expanded local names as named nodes.
*/
export const createUriAndTermNamespace = <T extends string>(baseUri: string, ...localNames: T[]):
Namespace<typeof localNames, string> & { terms: Namespace<typeof localNames, NamedNode> } =>
Object.assign(createUriNamespace(baseUri, ...localNames),
{ terms: createTermNamespace(baseUri, ...localNames) });
export const ACL = createUriAndTermNamespace('http://www.w3.org/ns/auth/acl#',
'accessTo', 'accessTo',
'agent', 'agent',
'agentClass', 'agentClass',
@ -41,21 +72,21 @@ export const ACL = createNamespace('http://www.w3.org/ns/auth/acl#',
'Control', 'Control',
); );
export const DCTERMS = createNamespace('http://purl.org/dc/terms/', export const DC = createUriAndTermNamespace('http://purl.org/dc/terms/',
'modified', 'modified',
); );
export const FOAF = createNamespace('http://xmlns.com/foaf/0.1/', export const FOAF = createUriAndTermNamespace('http://xmlns.com/foaf/0.1/',
'Agent', 'Agent',
'AuthenticatedAgent', 'AuthenticatedAgent',
); );
export const HTTP = createNamespace('urn:solid:http:', export const HTTP = createUriAndTermNamespace('urn:solid:http:',
'location', 'location',
'slug', 'slug',
); );
export const LDP = createNamespace('http://www.w3.org/ns/ldp#', export const LDP = createUriAndTermNamespace('http://www.w3.org/ns/ldp#',
'contains', 'contains',
'BasicContainer', 'BasicContainer',
@ -63,27 +94,28 @@ export const LDP = createNamespace('http://www.w3.org/ns/ldp#',
'Resource', 'Resource',
); );
export const MA = createNamespace('http://www.w3.org/ns/ma-ont#', export const MA = createUriAndTermNamespace('http://www.w3.org/ns/ma-ont#',
'format', 'format',
); );
export const PIM = createNamespace('http://www.w3.org/ns/pim/space#', export const PIM = createUriAndTermNamespace('http://www.w3.org/ns/pim/space#',
'Storage', 'Storage',
); );
export const POSIX = createNamespace('http://www.w3.org/ns/posix/stat#', export const POSIX = createUriAndTermNamespace('http://www.w3.org/ns/posix/stat#',
'mtime', 'mtime',
'size', 'size',
); );
export const RDF = createNamespace('http://www.w3.org/1999/02/22-rdf-syntax-ns#', export const RDF = createUriAndTermNamespace('http://www.w3.org/1999/02/22-rdf-syntax-ns#',
'type', 'type',
); );
export const XSD = createNamespace('http://www.w3.org/2001/XMLSchema#', export const XSD = createUriAndTermNamespace('http://www.w3.org/2001/XMLSchema#',
'dateTime', 'dateTime',
'integer', 'integer',
); );
// Alias for most commonly used URI // Alias for most commonly used URI
export const CONTENT_TYPE = MA.format; export const CONTENT_TYPE = MA.format;
export const CONTENT_TYPE_TERM = MA.terms.format;

View File

@ -1,7 +1,6 @@
import { createResponse } from 'node-mocks-http'; import { createResponse } from 'node-mocks-http';
import { LinkRelMetadataWriter } from '../../../../../src/ldp/http/metadata/LinkRelMetadataWriter'; import { LinkRelMetadataWriter } from '../../../../../src/ldp/http/metadata/LinkRelMetadataWriter';
import { RepresentationMetadata } from '../../../../../src/ldp/representation/RepresentationMetadata'; import { RepresentationMetadata } from '../../../../../src/ldp/representation/RepresentationMetadata';
import { toCachedNamedNode } from '../../../../../src/util/UriUtil';
import { LDP, RDF } from '../../../../../src/util/Vocabularies'; import { LDP, RDF } from '../../../../../src/util/Vocabularies';
describe('A LinkRelMetadataWriter', (): void => { describe('A LinkRelMetadataWriter', (): void => {
@ -9,7 +8,7 @@ describe('A LinkRelMetadataWriter', (): void => {
it('adds the correct link headers.', async(): Promise<void> => { it('adds the correct link headers.', async(): Promise<void> => {
const response = createResponse(); const response = createResponse();
const metadata = new RepresentationMetadata({ [RDF.type]: toCachedNamedNode(LDP.Resource), unused: 'text' }); const metadata = new RepresentationMetadata({ [RDF.type]: LDP.terms.Resource, unused: 'text' });
await expect(writer.handle({ response, metadata })).resolves.toBeUndefined(); await expect(writer.handle({ response, metadata })).resolves.toBeUndefined();
expect(response.getHeaders()).toEqual({ link: `<${LDP.Resource}>; rel="type"` }); expect(response.getHeaders()).toEqual({ link: `<${LDP.Resource}>; rel="type"` });
}); });

View File

@ -18,7 +18,6 @@ import type { Guarded } from '../../../src/util/GuardedStream';
import { SingleRootIdentifierStrategy } from '../../../src/util/identifiers/SingleRootIdentifierStrategy'; import { SingleRootIdentifierStrategy } from '../../../src/util/identifiers/SingleRootIdentifierStrategy';
import * as quadUtil from '../../../src/util/QuadUtil'; import * as quadUtil from '../../../src/util/QuadUtil';
import { guardedStreamFrom } from '../../../src/util/StreamUtil'; import { guardedStreamFrom } from '../../../src/util/StreamUtil';
import { toCachedNamedNode } from '../../../src/util/UriUtil';
import { CONTENT_TYPE, HTTP, LDP, PIM, RDF } from '../../../src/util/Vocabularies'; import { CONTENT_TYPE, HTTP, LDP, PIM, RDF } from '../../../src/util/Vocabularies';
import quad = DataFactory.quad; import quad = DataFactory.quad;
import namedNode = DataFactory.namedNode; import namedNode = DataFactory.namedNode;
@ -160,7 +159,7 @@ describe('A DataAccessorBasedStore', (): void => {
it('errors when trying to create a container with non-RDF data.', async(): Promise<void> => { it('errors when trying to create a container with non-RDF data.', async(): Promise<void> => {
const resourceID = { path: root }; const resourceID = { path: root };
representation.metadata.add(RDF.type, toCachedNamedNode(LDP.Container)); representation.metadata.add(RDF.type, LDP.terms.Container);
await expect(store.addResource(resourceID, representation)).rejects.toThrow(BadRequestHttpError); await expect(store.addResource(resourceID, representation)).rejects.toThrow(BadRequestHttpError);
}); });
@ -169,7 +168,7 @@ describe('A DataAccessorBasedStore', (): void => {
const mock = jest.spyOn(quadUtil, 'parseQuads').mockImplementationOnce(async(): Promise<any> => { const mock = jest.spyOn(quadUtil, 'parseQuads').mockImplementationOnce(async(): Promise<any> => {
throw 'apple'; throw 'apple';
}); });
representation.metadata.add(RDF.type, toCachedNamedNode(LDP.Container)); representation.metadata.add(RDF.type, LDP.terms.Container);
await expect(store.addResource(resourceID, representation)).rejects.toBe('apple'); await expect(store.addResource(resourceID, representation)).rejects.toBe('apple');
mock.mockRestore(); mock.mockRestore();
}); });
@ -186,7 +185,7 @@ describe('A DataAccessorBasedStore', (): void => {
it('can write containers.', async(): Promise<void> => { it('can write containers.', async(): Promise<void> => {
const resourceID = { path: root }; const resourceID = { path: root };
representation.metadata.add(RDF.type, toCachedNamedNode(LDP.Container)); representation.metadata.add(RDF.type, LDP.terms.Container);
representation.metadata.contentType = 'text/turtle'; representation.metadata.contentType = 'text/turtle';
representation.data = guardedStreamFrom([ `<${`${root}resource/`}> a <coolContainer>.` ]); representation.data = guardedStreamFrom([ `<${`${root}resource/`}> a <coolContainer>.` ]);
const result = await store.addResource(resourceID, representation); const result = await store.addResource(resourceID, representation);
@ -269,14 +268,14 @@ describe('A DataAccessorBasedStore', (): void => {
representation.metadata.identifier = DataFactory.namedNode(resourceID.path); representation.metadata.identifier = DataFactory.namedNode(resourceID.path);
const newRepresentation = { ...representation }; const newRepresentation = { ...representation };
newRepresentation.metadata = new RepresentationMetadata(representation.metadata); newRepresentation.metadata = new RepresentationMetadata(representation.metadata);
newRepresentation.metadata.add(RDF.type, toCachedNamedNode(LDP.Container)); newRepresentation.metadata.add(RDF.type, LDP.terms.Container);
await expect(store.setRepresentation(resourceID, newRepresentation)) await expect(store.setRepresentation(resourceID, newRepresentation))
.rejects.toThrow(new ConflictHttpError('Input resource type does not match existing resource type.')); .rejects.toThrow(new ConflictHttpError('Input resource type does not match existing resource type.'));
}); });
it('will error if the ending slash does not match its resource type.', async(): Promise<void> => { it('will error if the ending slash does not match its resource type.', async(): Promise<void> => {
const resourceID = { path: `${root}resource` }; const resourceID = { path: `${root}resource` };
representation.metadata.add(RDF.type, toCachedNamedNode(LDP.Container)); representation.metadata.add(RDF.type, LDP.terms.Container);
await expect(store.setRepresentation(resourceID, representation)).rejects.toThrow( await expect(store.setRepresentation(resourceID, representation)).rejects.toThrow(
new BadRequestHttpError('Containers should have a `/` at the end of their path, resources should not.'), new BadRequestHttpError('Containers should have a `/` at the end of their path, resources should not.'),
); );
@ -294,7 +293,7 @@ describe('A DataAccessorBasedStore', (): void => {
it('errors when trying to create a container with non-RDF data.', async(): Promise<void> => { it('errors when trying to create a container with non-RDF data.', async(): Promise<void> => {
const resourceID = { path: `${root}container/` }; const resourceID = { path: `${root}container/` };
representation.metadata.add(RDF.type, toCachedNamedNode(LDP.Container)); representation.metadata.add(RDF.type, LDP.terms.Container);
await expect(store.setRepresentation(resourceID, representation)).rejects.toThrow(BadRequestHttpError); await expect(store.setRepresentation(resourceID, representation)).rejects.toThrow(BadRequestHttpError);
}); });
@ -332,7 +331,7 @@ describe('A DataAccessorBasedStore', (): void => {
it('errors when trying to create a container with containment triples.', async(): Promise<void> => { it('errors when trying to create a container with containment triples.', async(): Promise<void> => {
const resourceID = { path: `${root}container/` }; const resourceID = { path: `${root}container/` };
representation.metadata.add(RDF.type, toCachedNamedNode(LDP.Container)); representation.metadata.add(RDF.type, LDP.terms.Container);
representation.metadata.contentType = 'text/turtle'; representation.metadata.contentType = 'text/turtle';
representation.metadata.identifier = DataFactory.namedNode(`${root}resource/`); representation.metadata.identifier = DataFactory.namedNode(`${root}resource/`);
representation.data = guardedStreamFrom( representation.data = guardedStreamFrom(
@ -390,7 +389,7 @@ describe('A DataAccessorBasedStore', (): void => {
}); });
it('will error when deleting a root storage container.', async(): Promise<void> => { it('will error when deleting a root storage container.', async(): Promise<void> => {
representation.metadata.add(RDF.type, toCachedNamedNode(PIM.Storage)); representation.metadata.add(RDF.type, PIM.terms.Storage);
accessor.data[`${root}container`] = representation; accessor.data[`${root}container`] = representation;
await expect(store.deleteResource({ path: `${root}container` })) await expect(store.deleteResource({ path: `${root}container` }))
.rejects.toThrow(new MethodNotAllowedHttpError('Cannot delete a root storage container.')); .rejects.toThrow(new MethodNotAllowedHttpError('Cannot delete a root storage container.'));

View File

@ -1,6 +1,6 @@
import 'jest-rdf'; import 'jest-rdf';
import type { Readable } from 'stream'; import type { Readable } from 'stream';
import { DataFactory } from 'n3'; import { namedNode } from '@rdfjs/data-model';
import type { Representation } from '../../../../src/ldp/representation/Representation'; import type { Representation } from '../../../../src/ldp/representation/Representation';
import { RepresentationMetadata } from '../../../../src/ldp/representation/RepresentationMetadata'; import { RepresentationMetadata } from '../../../../src/ldp/representation/RepresentationMetadata';
import { FileDataAccessor } from '../../../../src/storage/accessors/FileDataAccessor'; import { FileDataAccessor } from '../../../../src/storage/accessors/FileDataAccessor';
@ -12,8 +12,8 @@ import type { SystemError } from '../../../../src/util/errors/SystemError';
import { UnsupportedMediaTypeHttpError } from '../../../../src/util/errors/UnsupportedMediaTypeHttpError'; import { UnsupportedMediaTypeHttpError } from '../../../../src/util/errors/UnsupportedMediaTypeHttpError';
import type { Guarded } from '../../../../src/util/GuardedStream'; import type { Guarded } from '../../../../src/util/GuardedStream';
import { guardedStreamFrom, readableToString } from '../../../../src/util/StreamUtil'; import { guardedStreamFrom, readableToString } from '../../../../src/util/StreamUtil';
import { toCachedNamedNode, toLiteral } from '../../../../src/util/UriUtil'; import { toLiteral } from '../../../../src/util/UriUtil';
import { CONTENT_TYPE, DCTERMS, LDP, POSIX, RDF, XSD } from '../../../../src/util/Vocabularies'; import { CONTENT_TYPE, DC, LDP, POSIX, RDF, XSD } from '../../../../src/util/Vocabularies';
import { mockFs } from '../../../util/Util'; import { mockFs } from '../../../util/Util';
jest.mock('fs'); jest.mock('fs');
@ -98,9 +98,9 @@ describe('A FileDataAccessor', (): void => {
expect(metadata.identifier.value).toBe(`${base}resource.ttl`); expect(metadata.identifier.value).toBe(`${base}resource.ttl`);
expect(metadata.contentType).toBe('text/turtle'); expect(metadata.contentType).toBe('text/turtle');
expect(metadata.get(RDF.type)?.value).toBe(LDP.Resource); expect(metadata.get(RDF.type)?.value).toBe(LDP.Resource);
expect(metadata.get(POSIX.size)).toEqualRdfTerm(toLiteral('data'.length, XSD.integer)); expect(metadata.get(POSIX.size)).toEqualRdfTerm(toLiteral('data'.length, XSD.terms.integer));
expect(metadata.get(DCTERMS.modified)).toEqualRdfTerm(toLiteral(now.toISOString(), XSD.dateTime)); expect(metadata.get(DC.modified)).toEqualRdfTerm(toLiteral(now.toISOString(), XSD.terms.dateTime));
expect(metadata.get(POSIX.mtime)).toEqualRdfTerm(toLiteral(Math.floor(now.getTime() / 1000), XSD.integer)); expect(metadata.get(POSIX.mtime)).toEqualRdfTerm(toLiteral(Math.floor(now.getTime() / 1000), XSD.terms.integer));
}); });
it('generates the metadata for a container and its non-meta children.', async(): Promise<void> => { it('generates the metadata for a container and its non-meta children.', async(): Promise<void> => {
@ -108,23 +108,23 @@ describe('A FileDataAccessor', (): void => {
metadata = await accessor.getMetadata({ path: `${base}container/` }); metadata = await accessor.getMetadata({ path: `${base}container/` });
expect(metadata.identifier.value).toBe(`${base}container/`); expect(metadata.identifier.value).toBe(`${base}container/`);
expect(metadata.getAll(RDF.type)).toEqualRdfTermArray( expect(metadata.getAll(RDF.type)).toEqualRdfTermArray(
[ toCachedNamedNode(LDP.Container), toCachedNamedNode(LDP.BasicContainer), toCachedNamedNode(LDP.Resource) ], [ LDP.terms.Container, LDP.terms.BasicContainer, LDP.terms.Resource ],
); );
expect(metadata.get(POSIX.size)).toEqualRdfTerm(toLiteral(0, XSD.integer)); expect(metadata.get(POSIX.size)).toEqualRdfTerm(toLiteral(0, XSD.terms.integer));
expect(metadata.get(DCTERMS.modified)).toEqualRdfTerm(toLiteral(now.toISOString(), XSD.dateTime)); expect(metadata.get(DC.modified)).toEqualRdfTerm(toLiteral(now.toISOString(), XSD.terms.dateTime));
expect(metadata.get(POSIX.mtime)).toEqualRdfTerm(toLiteral(Math.floor(now.getTime() / 1000), XSD.integer)); expect(metadata.get(POSIX.mtime)).toEqualRdfTerm(toLiteral(Math.floor(now.getTime() / 1000), XSD.terms.integer));
expect(metadata.getAll(LDP.contains)).toEqualRdfTermArray( expect(metadata.getAll(LDP.contains)).toEqualRdfTermArray(
[ toCachedNamedNode(`${base}container/resource`), toCachedNamedNode(`${base}container/container2/`) ], [ namedNode(`${base}container/resource`), namedNode(`${base}container/container2/`) ],
); );
const childQuads = metadata.quads().filter((quad): boolean => const childQuads = metadata.quads().filter((quad): boolean =>
quad.subject.value === `${base}container/resource`); quad.subject.value === `${base}container/resource`);
const childMetadata = new RepresentationMetadata({ path: `${base}container/resource` }).addQuads(childQuads); const childMetadata = new RepresentationMetadata({ path: `${base}container/resource` }).addQuads(childQuads);
expect(childMetadata.get(RDF.type)?.value).toBe(LDP.Resource); expect(childMetadata.get(RDF.type)?.value).toBe(LDP.Resource);
expect(childMetadata.get(POSIX.size)).toEqualRdfTerm(toLiteral('data'.length, XSD.integer)); expect(childMetadata.get(POSIX.size)).toEqualRdfTerm(toLiteral('data'.length, XSD.terms.integer));
expect(childMetadata.get(DCTERMS.modified)).toEqualRdfTerm(toLiteral(now.toISOString(), XSD.dateTime)); expect(childMetadata.get(DC.modified)).toEqualRdfTerm(toLiteral(now.toISOString(), XSD.terms.dateTime));
expect(childMetadata.get(POSIX.mtime)).toEqualRdfTerm(toLiteral(Math.floor(now.getTime() / 1000), expect(childMetadata.get(POSIX.mtime)).toEqualRdfTerm(toLiteral(Math.floor(now.getTime() / 1000),
XSD.integer)); XSD.terms.integer));
}); });
it('adds stored metadata when requesting metadata.', async(): Promise<void> => { it('adds stored metadata when requesting metadata.', async(): Promise<void> => {
@ -168,7 +168,7 @@ describe('A FileDataAccessor', (): void => {
}); });
it('does not write metadata that is stored by the file system.', async(): Promise<void> => { it('does not write metadata that is stored by the file system.', async(): Promise<void> => {
metadata.add(RDF.type, toCachedNamedNode(LDP.Resource)); metadata.add(RDF.type, LDP.terms.Resource);
await expect(accessor.writeDocument({ path: `${base}resource` }, data, metadata)).resolves.toBeUndefined(); await expect(accessor.writeDocument({ path: `${base}resource` }, data, metadata)).resolves.toBeUndefined();
expect(cache.data.resource).toBe('data'); expect(cache.data.resource).toBe('data');
expect(cache.data['resource.meta']).toBeUndefined(); expect(cache.data['resource.meta']).toBeUndefined();
@ -212,7 +212,7 @@ describe('A FileDataAccessor', (): void => {
it('updates the filename if the content-type gets updated.', async(): Promise<void> => { it('updates the filename if the content-type gets updated.', async(): Promise<void> => {
cache.data = { 'resource$.ttl': '<this> <is> <data>.', 'resource.meta': '<this> <is> <metadata>.' }; cache.data = { 'resource$.ttl': '<this> <is> <data>.', 'resource.meta': '<this> <is> <metadata>.' };
metadata.identifier = DataFactory.namedNode(`${base}resource`); metadata.identifier = namedNode(`${base}resource`);
metadata.contentType = 'text/plain'; metadata.contentType = 'text/plain';
metadata.add('new', 'metadata'); metadata.add('new', 'metadata');
await expect(accessor.writeDocument({ path: `${base}resource` }, data, metadata)) await expect(accessor.writeDocument({ path: `${base}resource` }, data, metadata))
@ -224,7 +224,7 @@ describe('A FileDataAccessor', (): void => {
}); });
it('does not try to update the content-type if there is no original file.', async(): Promise<void> => { it('does not try to update the content-type if there is no original file.', async(): Promise<void> => {
metadata.identifier = DataFactory.namedNode(`${base}resource.txt`); metadata.identifier = namedNode(`${base}resource.txt`);
metadata.contentType = 'text/turtle'; metadata.contentType = 'text/turtle';
metadata.add('new', 'metadata'); metadata.add('new', 'metadata');
await expect(accessor.writeDocument({ path: `${base}resource.txt` }, data, metadata)) await expect(accessor.writeDocument({ path: `${base}resource.txt` }, data, metadata))
@ -289,7 +289,7 @@ describe('A FileDataAccessor', (): void => {
it('does not write metadata that is stored by the file system.', async(): Promise<void> => { it('does not write metadata that is stored by the file system.', async(): Promise<void> => {
metadata = new RepresentationMetadata( metadata = new RepresentationMetadata(
{ path: `${base}container/` }, { path: `${base}container/` },
{ [RDF.type]: [ toCachedNamedNode(LDP.BasicContainer), toCachedNamedNode(LDP.Resource) ]}, { [RDF.type]: [ LDP.terms.BasicContainer, LDP.terms.Resource ]},
); );
await expect(accessor.writeContainer({ path: `${base}container/` }, metadata)).resolves.toBeUndefined(); await expect(accessor.writeContainer({ path: `${base}container/` }, metadata)).resolves.toBeUndefined();
expect(cache.data.container).toEqual({}); expect(cache.data.container).toEqual({});

View File

@ -1,12 +1,12 @@
import 'jest-rdf'; import 'jest-rdf';
import type { Readable } from 'stream'; import type { Readable } from 'stream';
import { DataFactory } from 'n3';
import { RepresentationMetadata } from '../../../../src/ldp/representation/RepresentationMetadata'; import { RepresentationMetadata } from '../../../../src/ldp/representation/RepresentationMetadata';
import { InMemoryDataAccessor } from '../../../../src/storage/accessors/InMemoryDataAccessor'; import { InMemoryDataAccessor } from '../../../../src/storage/accessors/InMemoryDataAccessor';
import { APPLICATION_OCTET_STREAM } from '../../../../src/util/ContentTypes'; import { APPLICATION_OCTET_STREAM } from '../../../../src/util/ContentTypes';
import { NotFoundHttpError } from '../../../../src/util/errors/NotFoundHttpError'; import { NotFoundHttpError } from '../../../../src/util/errors/NotFoundHttpError';
import type { Guarded } from '../../../../src/util/GuardedStream'; import type { Guarded } from '../../../../src/util/GuardedStream';
import { guardedStreamFrom, readableToString } from '../../../../src/util/StreamUtil'; import { guardedStreamFrom, readableToString } from '../../../../src/util/StreamUtil';
import { toCachedNamedNode } from '../../../../src/util/UriUtil';
import { CONTENT_TYPE, LDP, RDF } from '../../../../src/util/Vocabularies'; import { CONTENT_TYPE, LDP, RDF } from '../../../../src/util/Vocabularies';
describe('An InMemoryDataAccessor', (): void => { describe('An InMemoryDataAccessor', (): void => {
@ -80,13 +80,13 @@ describe('An InMemoryDataAccessor', (): void => {
await expect(accessor.writeContainer({ path: `${base}container/container2` }, metadata)).resolves.toBeUndefined(); await expect(accessor.writeContainer({ path: `${base}container/container2` }, metadata)).resolves.toBeUndefined();
metadata = await accessor.getMetadata({ path: `${base}container/` }); metadata = await accessor.getMetadata({ path: `${base}container/` });
expect(metadata.getAll(LDP.contains)).toEqualRdfTermArray( expect(metadata.getAll(LDP.contains)).toEqualRdfTermArray(
[ toCachedNamedNode(`${base}container/resource`), toCachedNamedNode(`${base}container/container2/`) ], [ DataFactory.namedNode(`${base}container/resource`), DataFactory.namedNode(`${base}container/container2/`) ],
); );
}); });
it('adds stored metadata when requesting document metadata.', async(): Promise<void> => { it('adds stored metadata when requesting document metadata.', async(): Promise<void> => {
const identifier = { path: `${base}resource` }; const identifier = { path: `${base}resource` };
const inputMetadata = new RepresentationMetadata(identifier, { [RDF.type]: toCachedNamedNode(LDP.Resource) }); const inputMetadata = new RepresentationMetadata(identifier, { [RDF.type]: LDP.terms.Resource });
await expect(accessor.writeDocument(identifier, data, inputMetadata)).resolves.toBeUndefined(); await expect(accessor.writeDocument(identifier, data, inputMetadata)).resolves.toBeUndefined();
metadata = await accessor.getMetadata(identifier); metadata = await accessor.getMetadata(identifier);
expect(metadata.identifier.value).toBe(`${base}resource`); expect(metadata.identifier.value).toBe(`${base}resource`);
@ -97,7 +97,7 @@ describe('An InMemoryDataAccessor', (): void => {
it('adds stored metadata when requesting container metadata.', async(): Promise<void> => { it('adds stored metadata when requesting container metadata.', async(): Promise<void> => {
const identifier = { path: `${base}container/` }; const identifier = { path: `${base}container/` };
const inputMetadata = new RepresentationMetadata(identifier, { [RDF.type]: toCachedNamedNode(LDP.Container) }); const inputMetadata = new RepresentationMetadata(identifier, { [RDF.type]: LDP.terms.Container });
await expect(accessor.writeContainer(identifier, inputMetadata)).resolves.toBeUndefined(); await expect(accessor.writeContainer(identifier, inputMetadata)).resolves.toBeUndefined();
metadata = await accessor.getMetadata(identifier); metadata = await accessor.getMetadata(identifier);
@ -109,7 +109,7 @@ describe('An InMemoryDataAccessor', (): void => {
it('can overwrite the metadata of an existing container without overwriting children.', async(): Promise<void> => { it('can overwrite the metadata of an existing container without overwriting children.', async(): Promise<void> => {
const identifier = { path: `${base}container/` }; const identifier = { path: `${base}container/` };
const inputMetadata = new RepresentationMetadata(identifier, { [RDF.type]: toCachedNamedNode(LDP.Container) }); const inputMetadata = new RepresentationMetadata(identifier, { [RDF.type]: LDP.terms.Container });
await expect(accessor.writeContainer(identifier, inputMetadata)).resolves.toBeUndefined(); await expect(accessor.writeContainer(identifier, inputMetadata)).resolves.toBeUndefined();
const resourceMetadata = new RepresentationMetadata(); const resourceMetadata = new RepresentationMetadata();
await expect(accessor.writeDocument( await expect(accessor.writeDocument(
@ -117,7 +117,7 @@ describe('An InMemoryDataAccessor', (): void => {
)).resolves.toBeUndefined(); )).resolves.toBeUndefined();
const newMetadata = new RepresentationMetadata(inputMetadata); const newMetadata = new RepresentationMetadata(inputMetadata);
newMetadata.add(RDF.type, toCachedNamedNode(LDP.BasicContainer)); newMetadata.add(RDF.type, LDP.terms.BasicContainer);
await expect(accessor.writeContainer(identifier, newMetadata)).resolves.toBeUndefined(); await expect(accessor.writeContainer(identifier, newMetadata)).resolves.toBeUndefined();
metadata = await accessor.getMetadata(identifier); metadata = await accessor.getMetadata(identifier);
@ -135,7 +135,7 @@ describe('An InMemoryDataAccessor', (): void => {
it('can write to the root container without overriding its children.', async(): Promise<void> => { it('can write to the root container without overriding its children.', async(): Promise<void> => {
const identifier = { path: `${base}` }; const identifier = { path: `${base}` };
const inputMetadata = new RepresentationMetadata(identifier, { [RDF.type]: toCachedNamedNode(LDP.Container) }); const inputMetadata = new RepresentationMetadata(identifier, { [RDF.type]: LDP.terms.Container });
await expect(accessor.writeContainer(identifier, inputMetadata)).resolves.toBeUndefined(); await expect(accessor.writeContainer(identifier, inputMetadata)).resolves.toBeUndefined();
const resourceMetadata = new RepresentationMetadata(); const resourceMetadata = new RepresentationMetadata();
await expect(accessor.writeDocument( await expect(accessor.writeDocument(

View File

@ -14,8 +14,7 @@ import { UnsupportedMediaTypeHttpError } from '../../../../src/util/errors/Unsup
import type { Guarded } from '../../../../src/util/GuardedStream'; import type { Guarded } from '../../../../src/util/GuardedStream';
import { SingleRootIdentifierStrategy } from '../../../../src/util/identifiers/SingleRootIdentifierStrategy'; import { SingleRootIdentifierStrategy } from '../../../../src/util/identifiers/SingleRootIdentifierStrategy';
import { guardedStreamFrom } from '../../../../src/util/StreamUtil'; import { guardedStreamFrom } from '../../../../src/util/StreamUtil';
import { toCachedNamedNode } from '../../../../src/util/UriUtil'; import { CONTENT_TYPE_TERM, LDP, RDF } from '../../../../src/util/Vocabularies';
import { CONTENT_TYPE, LDP, RDF } from '../../../../src/util/Vocabularies';
const { literal, namedNode, quad } = DataFactory; const { literal, namedNode, quad } = DataFactory;
@ -94,7 +93,7 @@ describe('A SparqlDataAccessor', (): void => {
metadata = await accessor.getMetadata({ path: 'http://identifier' }); metadata = await accessor.getMetadata({ path: 'http://identifier' });
expect(metadata.quads()).toBeRdfIsomorphic([ expect(metadata.quads()).toBeRdfIsomorphic([
quad(namedNode('this'), namedNode('a'), namedNode('triple')), quad(namedNode('this'), namedNode('a'), namedNode('triple')),
quad(namedNode('http://identifier'), toCachedNamedNode(CONTENT_TYPE), literal(INTERNAL_QUADS)), quad(namedNode('http://identifier'), CONTENT_TYPE_TERM, literal(INTERNAL_QUADS)),
]); ]);
expect(fetchTriples).toHaveBeenCalledTimes(1); expect(fetchTriples).toHaveBeenCalledTimes(1);
@ -135,7 +134,7 @@ describe('A SparqlDataAccessor', (): void => {
it('overwrites the metadata when writing a container and updates parent.', async(): Promise<void> => { it('overwrites the metadata when writing a container and updates parent.', async(): Promise<void> => {
metadata = new RepresentationMetadata({ path: 'http://test.com/container/' }, metadata = new RepresentationMetadata({ path: 'http://test.com/container/' },
{ [RDF.type]: [ toCachedNamedNode(LDP.Resource), toCachedNamedNode(LDP.Container) ]}); { [RDF.type]: [ LDP.terms.Resource, LDP.terms.Container ]});
await expect(accessor.writeContainer({ path: 'http://test.com/container/' }, metadata)).resolves.toBeUndefined(); await expect(accessor.writeContainer({ path: 'http://test.com/container/' }, metadata)).resolves.toBeUndefined();
expect(fetchUpdate).toHaveBeenCalledTimes(1); expect(fetchUpdate).toHaveBeenCalledTimes(1);
@ -154,7 +153,7 @@ describe('A SparqlDataAccessor', (): void => {
it('does not write containment triples when writing to a root container.', async(): Promise<void> => { it('does not write containment triples when writing to a root container.', async(): Promise<void> => {
metadata = new RepresentationMetadata({ path: 'http://test.com/' }, metadata = new RepresentationMetadata({ path: 'http://test.com/' },
{ [RDF.type]: [ toCachedNamedNode(LDP.Resource), toCachedNamedNode(LDP.Container) ]}); { [RDF.type]: [ LDP.terms.Resource, LDP.terms.Container ]});
await expect(accessor.writeContainer({ path: 'http://test.com/' }, metadata)).resolves.toBeUndefined(); await expect(accessor.writeContainer({ path: 'http://test.com/' }, metadata)).resolves.toBeUndefined();
expect(fetchUpdate).toHaveBeenCalledTimes(1); expect(fetchUpdate).toHaveBeenCalledTimes(1);
@ -172,7 +171,7 @@ describe('A SparqlDataAccessor', (): void => {
it('overwrites the data and metadata when writing a resource and updates parent.', async(): Promise<void> => { it('overwrites the data and metadata when writing a resource and updates parent.', async(): Promise<void> => {
metadata = new RepresentationMetadata({ path: 'http://test.com/container/resource' }, metadata = new RepresentationMetadata({ path: 'http://test.com/container/resource' },
{ [RDF.type]: [ toCachedNamedNode(LDP.Resource) ]}); { [RDF.type]: [ LDP.terms.Resource ]});
await expect(accessor.writeDocument({ path: 'http://test.com/container/resource' }, data, metadata)) await expect(accessor.writeDocument({ path: 'http://test.com/container/resource' }, data, metadata))
.resolves.toBeUndefined(); .resolves.toBeUndefined();
@ -191,7 +190,7 @@ describe('A SparqlDataAccessor', (): void => {
it('overwrites the data and metadata when writing an empty resource.', async(): Promise<void> => { it('overwrites the data and metadata when writing an empty resource.', async(): Promise<void> => {
metadata = new RepresentationMetadata({ path: 'http://test.com/container/resource' }, metadata = new RepresentationMetadata({ path: 'http://test.com/container/resource' },
{ [RDF.type]: [ toCachedNamedNode(LDP.Resource) ]}); { [RDF.type]: [ LDP.terms.Resource ]});
const empty = guardedStreamFrom([]); const empty = guardedStreamFrom([]);
await expect(accessor.writeDocument({ path: 'http://test.com/container/resource' }, empty, metadata)) await expect(accessor.writeDocument({ path: 'http://test.com/container/resource' }, empty, metadata))
.resolves.toBeUndefined(); .resolves.toBeUndefined();
@ -210,7 +209,7 @@ describe('A SparqlDataAccessor', (): void => {
it('removes all references when deleting a resource.', async(): Promise<void> => { it('removes all references when deleting a resource.', async(): Promise<void> => {
metadata = new RepresentationMetadata({ path: 'http://test.com/container/' }, metadata = new RepresentationMetadata({ path: 'http://test.com/container/' },
{ [RDF.type]: [ toCachedNamedNode(LDP.Resource), toCachedNamedNode(LDP.Container) ]}); { [RDF.type]: [ LDP.terms.Resource, LDP.terms.Container ]});
await expect(accessor.deleteResource({ path: 'http://test.com/container/' })).resolves.toBeUndefined(); await expect(accessor.deleteResource({ path: 'http://test.com/container/' })).resolves.toBeUndefined();
expect(fetchUpdate).toHaveBeenCalledTimes(1); expect(fetchUpdate).toHaveBeenCalledTimes(1);
@ -224,7 +223,7 @@ describe('A SparqlDataAccessor', (): void => {
it('does not try to remove containment triples when deleting a root container.', async(): Promise<void> => { it('does not try to remove containment triples when deleting a root container.', async(): Promise<void> => {
metadata = new RepresentationMetadata({ path: 'http://test.com/' }, metadata = new RepresentationMetadata({ path: 'http://test.com/' },
{ [RDF.type]: [ toCachedNamedNode(LDP.Resource), toCachedNamedNode(LDP.Container) ]}); { [RDF.type]: [ LDP.terms.Resource, LDP.terms.Container ]});
await expect(accessor.deleteResource({ path: 'http://test.com/' })).resolves.toBeUndefined(); await expect(accessor.deleteResource({ path: 'http://test.com/' })).resolves.toBeUndefined();
expect(fetchUpdate).toHaveBeenCalledTimes(1); expect(fetchUpdate).toHaveBeenCalledTimes(1);

View File

@ -84,7 +84,7 @@ describe('An UriUtil', (): void => {
describe('toLiteral function', (): void => { describe('toLiteral function', (): void => {
it('converts the input to a valid literal with the given type.', async(): Promise<void> => { it('converts the input to a valid literal with the given type.', async(): Promise<void> => {
const expected = literal('5', namedNode(XSD.integer)); const expected = literal('5', namedNode(XSD.integer));
expect(toLiteral(5, XSD.integer)).toEqualRdfTerm(expected); expect(toLiteral(5, XSD.terms.integer)).toEqualRdfTerm(expected);
}); });
}); });
}); });

View File

@ -1,3 +1,4 @@
import { namedNode } from '@rdfjs/data-model';
import { LDP } from '../../../src/util/Vocabularies'; import { LDP } from '../../../src/util/Vocabularies';
describe('Vocabularies', (): void => { describe('Vocabularies', (): void => {
@ -6,12 +7,24 @@ describe('Vocabularies', (): void => {
expect(LDP('new')).toBe('http://www.w3.org/ns/ldp#new'); expect(LDP('new')).toBe('http://www.w3.org/ns/ldp#new');
}); });
it('caches new properties.', (): void => { it('can create new properties as terms.', (): void => {
expect(LDP('new')).toBe(LDP('new')); expect(LDP.terms('new')).toEqual(namedNode('http://www.w3.org/ns/ldp#new'));
});
it('caches new properties as terms.', (): void => {
expect(LDP.terms('new')).toBe(LDP.terms('new'));
}); });
it('exposes ldp:contains.', (): void => { it('exposes ldp:contains.', (): void => {
expect(LDP.contains).toBe('http://www.w3.org/ns/ldp#contains'); expect(LDP.contains).toBe('http://www.w3.org/ns/ldp#contains');
}); });
it('exposes ldp:contains as a term.', (): void => {
expect(LDP.terms.contains).toEqual(namedNode('http://www.w3.org/ns/ldp#contains'));
});
it('caches ldp:contains as a term.', (): void => {
expect(LDP.terms.contains).toBe(LDP.terms.contains);
});
}); });
}); });