Mirror of https://github.com/CommunitySolidServer/CommunitySolidServer.git (synced 2024-10-03 14:55:10 +00:00)
feat: Only accept NamedNodes as predicates for metadata
* refactor: move toCachedNamedNode (private)
* chore: only NamedNodes predicates in removes
* feat: enforce NamedNode predicates in most cases
* feat: getAll only accepts NamedNodes
* feat: toCachedNamedNode only accepts string arg
* tests: use NamedNodes for getAll calls
* test: remove unnecessary string check for coverage
* tests: fix NamedNodes in new tests after rebase
* feat: metadatawriters store NamedNodes
* refactor: toCachedNamedNode as utility function
* fix: double write of linkRelMap
* test: use the CONTENT_TYPE constant
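For readers skimming the diff: the gist of the change is that RepresentationMetadata predicates must now be RDF/JS NamedNodes (the `.terms` variants of the vocabulary constants, or an explicit namedNode(...)), and content types are keyed on the CONTENT_TYPE constant. A minimal TypeScript sketch of the new calling convention follows; it is illustrative only, the import paths and the example predicate IRI are placeholders and not part of the commit.

// Illustrative sketch, assuming the call shapes shown in the updated tests below.
// Import paths and the example predicate IRI are placeholders.
import { DataFactory } from 'n3';
import { RepresentationMetadata } from './src/http/representation/RepresentationMetadata';
import { CONTENT_TYPE, LDP, RDF } from './src/util/Vocabularies';

const { namedNode } = DataFactory;

// The content type is set through the CONTENT_TYPE constant key instead of a bare 'contentType' string key.
const metadata = new RepresentationMetadata({ [CONTENT_TYPE]: 'text/turtle' });

// Predicates are NamedNodes: either the `.terms` variants of the vocabularies...
metadata.add(RDF.terms.type, LDP.terms.Container);
metadata.removeAll(RDF.terms.type);

// ...or explicitly wrapped IRIs; bare string predicates are no longer accepted.
metadata.add(namedNode('http://example.org/likes'), 'apples');
const likes = metadata.get(namedNode('http://example.org/likes'))?.value;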
@@ -181,7 +181,8 @@ describe('A DataAccessorBasedStore', (): void => {
 expect(result).toMatchObject({ binary: true });
 expect(await arrayifyStream(result.data)).toEqual([ resourceData ]);
 expect(result.metadata.contentType).toBe('text/plain');
-expect(result.metadata.get('AUXILIARY')?.value).toBe(auxiliaryStrategy.getAuxiliaryIdentifier(resourceID).path);
+expect(result.metadata.get(namedNode('AUXILIARY'))?.value)
+.toBe(auxiliaryStrategy.getAuxiliaryIdentifier(resourceID).path);
 });

 it('will return a data stream that matches the metadata for containers.', async(): Promise<void> => {
@@ -196,7 +197,8 @@ describe('A DataAccessorBasedStore', (): void => {
 expect(result).toMatchObject({ binary: false });
 expect(await arrayifyStream(result.data)).toBeRdfIsomorphic(metaMirror.quads());
 expect(result.metadata.contentType).toEqual(INTERNAL_QUADS);
-expect(result.metadata.get('AUXILIARY')?.value).toBe(auxiliaryStrategy.getAuxiliaryIdentifier(resourceID).path);
+expect(result.metadata.get(namedNode('AUXILIARY'))?.value)
+.toBe(auxiliaryStrategy.getAuxiliaryIdentifier(resourceID).path);
 });

 it('will remove containment triples referencing auxiliary resources.', async(): Promise<void> => {
@@ -255,13 +257,13 @@ describe('A DataAccessorBasedStore', (): void => {

 it('errors when trying to create a container with non-RDF data.', async(): Promise<void> => {
 const resourceID = { path: root };
-representation.metadata.add(RDF.type, LDP.terms.Container);
+representation.metadata.add(RDF.terms.type, LDP.terms.Container);
 await expect(store.addResource(resourceID, representation)).rejects.toThrow(BadRequestHttpError);
 });

 it('can write resources.', async(): Promise<void> => {
 const resourceID = { path: root };
-representation.metadata.removeAll(RDF.type);
+representation.metadata.removeAll(RDF.terms.type);
 const result = await store.addResource(resourceID, representation);
 expect(result).toEqual({
 path: expect.stringMatching(new RegExp(`^${root}[^/]+$`, 'u')),
@@ -272,7 +274,7 @@ describe('A DataAccessorBasedStore', (): void => {

 it('can write containers.', async(): Promise<void> => {
 const resourceID = { path: root };
-representation.metadata.add(RDF.type, LDP.terms.Container);
+representation.metadata.add(RDF.terms.type, LDP.terms.Container);
 representation.metadata.contentType = 'text/turtle';
 representation.data = guardedStreamFrom([ '<> a <http://test.com/coolContainer>.' ]);
 const result = await store.addResource(resourceID, representation);
@@ -291,8 +293,8 @@ describe('A DataAccessorBasedStore', (): void => {

 it('creates a URI based on the incoming slug.', async(): Promise<void> => {
 const resourceID = { path: root };
-representation.metadata.removeAll(RDF.type);
-representation.metadata.add(SOLID_HTTP.slug, 'newName');
+representation.metadata.removeAll(RDF.terms.type);
+representation.metadata.add(SOLID_HTTP.terms.slug, 'newName');
 const result = await store.addResource(resourceID, representation);
 expect(result).toEqual({
 path: `${root}newName`,
@@ -301,8 +303,8 @@ describe('A DataAccessorBasedStore', (): void => {

 it('errors on a slug ending on / without Link rel:type Container header.', async(): Promise<void> => {
 const resourceID = { path: root };
-representation.metadata.removeAll(RDF.type);
-representation.metadata.add(SOLID_HTTP.slug, 'noContainer/');
+representation.metadata.removeAll(RDF.terms.type);
+representation.metadata.add(SOLID_HTTP.terms.slug, 'noContainer/');
 representation.data = guardedStreamFrom([ `` ]);
 const result = store.addResource(resourceID, representation);

@@ -314,9 +316,9 @@ describe('A DataAccessorBasedStore', (): void => {
 it('creates a URI when the incoming slug does not end with /, ' +
 'but has a Link rel:type Container header.', async(): Promise<void> => {
 const resourceID = { path: root };
-representation.metadata.removeAll(RDF.type);
-representation.metadata.add(RDF.type, LDP.terms.Container);
-representation.metadata.add(SOLID_HTTP.slug, 'newContainer');
+representation.metadata.removeAll(RDF.terms.type);
+representation.metadata.add(RDF.terms.type, LDP.terms.Container);
+representation.metadata.add(SOLID_HTTP.terms.slug, 'newContainer');
 representation.data = guardedStreamFrom([ `` ]);
 const result = await store.addResource(resourceID, representation);
 expect(result).toEqual({
@@ -326,7 +328,7 @@ describe('A DataAccessorBasedStore', (): void => {

 it('generates a new URI if adding the slug would create an existing URI.', async(): Promise<void> => {
 const resourceID = { path: root };
-representation.metadata.add(SOLID_HTTP.slug, 'newName');
+representation.metadata.add(SOLID_HTTP.terms.slug, 'newName');
 accessor.data[`${root}newName`] = representation;
 const result = await store.addResource(resourceID, representation);
 expect(result).not.toEqual({
@@ -339,17 +341,17 @@ describe('A DataAccessorBasedStore', (): void => {

 it('generates http://test.com/%26%26 when slug is &%26.', async(): Promise<void> => {
 const resourceID = { path: root };
-representation.metadata.removeAll(RDF.type);
-representation.metadata.add(SOLID_HTTP.slug, '&%26');
+representation.metadata.removeAll(RDF.terms.type);
+representation.metadata.add(SOLID_HTTP.terms.slug, '&%26');
 const result = await store.addResource(resourceID, representation);
 expect(result).toEqual({ path: `${root}%26%26` });
 });

 it('errors if the slug contains a slash.', async(): Promise<void> => {
 const resourceID = { path: root };
-representation.metadata.removeAll(RDF.type);
+representation.metadata.removeAll(RDF.terms.type);
 representation.data = guardedStreamFrom([ `` ]);
-representation.metadata.add(SOLID_HTTP.slug, 'sla/sh/es');
+representation.metadata.add(SOLID_HTTP.terms.slug, 'sla/sh/es');
 const result = store.addResource(resourceID, representation);
 await expect(result).rejects.toThrow(BadRequestHttpError);
 await expect(result).rejects.toThrow('Slugs should not contain slashes');
@@ -357,8 +359,8 @@ describe('A DataAccessorBasedStore', (): void => {

 it('errors if the slug would cause an auxiliary resource URI to be generated.', async(): Promise<void> => {
 const resourceID = { path: root };
-representation.metadata.removeAll(RDF.type);
-representation.metadata.add(SOLID_HTTP.slug, 'test.dummy');
+representation.metadata.removeAll(RDF.terms.type);
+representation.metadata.add(SOLID_HTTP.terms.slug, 'test.dummy');
 const result = store.addResource(resourceID, representation);
 await expect(result).rejects.toThrow(ForbiddenHttpError);
 await expect(result).rejects.toThrow('Slug bodies that would result in an auxiliary resource are forbidden');
@@ -402,7 +404,7 @@ describe('A DataAccessorBasedStore', (): void => {
 const mock = jest.spyOn(accessor, 'getMetadata');

 const resourceID = { path: `${root}` };
-representation.metadata.removeAll(RDF.type);
+representation.metadata.removeAll(RDF.terms.type);
 representation.metadata.contentType = 'text/turtle';
 representation.data = guardedStreamFrom([ `<${root}> a <coolContainer>.` ]);

@@ -416,7 +418,7 @@ describe('A DataAccessorBasedStore', (): void => {

 it('will error if path does not end in slash and does not match its resource type.', async(): Promise<void> => {
 const resourceID = { path: `${root}resource` };
-representation.metadata.add(RDF.type, LDP.terms.Container);
+representation.metadata.add(RDF.terms.type, LDP.terms.Container);
 await expect(store.setRepresentation(resourceID, representation)).rejects.toThrow(
 new BadRequestHttpError('Containers should have a `/` at the end of their path, resources should not.'),
 );
@@ -424,7 +426,7 @@ describe('A DataAccessorBasedStore', (): void => {

 it('errors when trying to create a container with non-RDF data.', async(): Promise<void> => {
 const resourceID = { path: `${root}container/` };
-representation.metadata.add(RDF.type, LDP.terms.Container);
+representation.metadata.add(RDF.terms.type, LDP.terms.Container);
 await expect(store.setRepresentation(resourceID, representation)).rejects.toThrow(BadRequestHttpError);
 });

@@ -450,7 +452,7 @@ describe('A DataAccessorBasedStore', (): void => {
 const resourceID = { path: `${root}container/` };

 // Generate based on URI
-representation.metadata.removeAll(RDF.type);
+representation.metadata.removeAll(RDF.terms.type);
 representation.metadata.contentType = 'text/turtle';
 representation.data = guardedStreamFrom([ `<${root}resource/> a <coolContainer>.` ]);
 await expect(store.setRepresentation(resourceID, representation)).resolves.toEqual([
@@ -488,15 +490,15 @@ describe('A DataAccessorBasedStore', (): void => {

 it('does not write generated metadata.', async(): Promise<void> => {
 const resourceID = { path: `${root}resource` };
-representation.metadata.add('notGen', 'value');
-representation.metadata.add('gen', 'value', SOLID_META.terms.ResponseMetadata);
+representation.metadata.add(namedNode('notGen'), 'value');
+representation.metadata.add(namedNode('gen'), 'value', SOLID_META.terms.ResponseMetadata);
 await expect(store.setRepresentation(resourceID, representation)).resolves.toEqual([
 { path: root },
 { path: `${root}resource` },
 ]);
 await expect(arrayifyStream(accessor.data[resourceID.path].data)).resolves.toEqual([ resourceData ]);
-expect(accessor.data[resourceID.path].metadata.get('notGen')?.value).toBe('value');
-expect(accessor.data[resourceID.path].metadata.get('gen')).toBeUndefined();
+expect(accessor.data[resourceID.path].metadata.get(namedNode('notGen'))?.value).toBe('value');
+expect(accessor.data[resourceID.path].metadata.get(namedNode('gen'))).toBeUndefined();
 });

 it('can write resources even if root does not exist.', async(): Promise<void> => {
@@ -514,7 +516,7 @@ describe('A DataAccessorBasedStore', (): void => {
 const resourceID = { path: `${root}container/` };

 // Generate based on URI
-representation.metadata.removeAll(RDF.type);
+representation.metadata.removeAll(RDF.terms.type);
 representation.metadata.contentType = 'internal/quads';
 representation.data = guardedStreamFrom(
 [ quad(namedNode(`${root}resource/`), namedNode('a'), namedNode('coolContainer')) ],
@@ -529,7 +531,7 @@ describe('A DataAccessorBasedStore', (): void => {

 it('errors when trying to create a container with containment triples.', async(): Promise<void> => {
 const resourceID = { path: `${root}container/` };
-representation.metadata.add(RDF.type, LDP.terms.Container);
+representation.metadata.add(RDF.terms.type, LDP.terms.Container);
 representation.metadata.contentType = 'text/turtle';
 representation.metadata.identifier = DataFactory.namedNode(`${root}resource/`);
 representation.data = guardedStreamFrom(
@@ -548,9 +550,9 @@ describe('A DataAccessorBasedStore', (): void => {
 { path: `${root}a/b/resource` },
 ]);
 await expect(arrayifyStream(accessor.data[resourceID.path].data)).resolves.toEqual([ resourceData ]);
-expect(accessor.data[`${root}a/`].metadata.getAll(RDF.type).map((type): string => type.value))
+expect(accessor.data[`${root}a/`].metadata.getAll(RDF.terms.type).map((type): string => type.value))
 .toContain(LDP.Container);
-expect(accessor.data[`${root}a/b/`].metadata.getAll(RDF.type).map((type): string => type.value))
+expect(accessor.data[`${root}a/b/`].metadata.getAll(RDF.terms.type).map((type): string => type.value))
 .toContain(LDP.Container);
 });

@@ -568,7 +570,7 @@ describe('A DataAccessorBasedStore', (): void => {
 const resourceID = { path: `${root}` };

 // Generate based on URI
-representation.metadata.removeAll(RDF.type);
+representation.metadata.removeAll(RDF.terms.type);
 representation.metadata.contentType = 'text/turtle';
 representation.data = guardedStreamFrom([]);
 await expect(store.setRepresentation(resourceID, representation)).resolves.toEqual([
@@ -620,7 +622,7 @@ describe('A DataAccessorBasedStore', (): void => {
 });

 it('will error when deleting a root storage container.', async(): Promise<void> => {
-representation.metadata.add(RDF.type, PIM.terms.Storage);
+representation.metadata.add(RDF.terms.type, PIM.terms.Storage);
 accessor.data[`${root}container/`] = representation;
 const result = store.deleteResource({ path: `${root}container/` });
 await expect(result).rejects.toThrow(MethodNotAllowedHttpError);
@@ -629,7 +631,7 @@ describe('A DataAccessorBasedStore', (): void => {

 it('will error when deleting an auxiliary of a root storage container if not allowed.', async(): Promise<void> => {
 const storageMetadata = new RepresentationMetadata(representation.metadata);
-storageMetadata.add(RDF.type, PIM.terms.Storage);
+storageMetadata.add(RDF.terms.type, PIM.terms.Storage);
 accessor.data[`${root}container/`] = new BasicRepresentation(representation.data, storageMetadata);
 accessor.data[`${root}container/.dummy`] = representation;
 auxiliaryStrategy.isRequiredInRoot = jest.fn().mockReturnValue(true);

@@ -18,6 +18,8 @@ import { toLiteral } from '../../../../src/util/TermUtil';
 import { CONTENT_TYPE, DC, LDP, POSIX, RDF, SOLID_META, XSD } from '../../../../src/util/Vocabularies';
 import { mockFs } from '../../../util/Util';

+const { namedNode } = DataFactory;
+
 jest.mock('fs');

 const rootFilePath = 'uploads';
@@ -104,10 +106,11 @@ describe('A FileDataAccessor', (): void => {
 metadata = await accessor.getMetadata({ path: `${base}resource.ttl` });
 expect(metadata.identifier.value).toBe(`${base}resource.ttl`);
 expect(metadata.contentType).toBe('text/turtle');
-expect(metadata.get(RDF.type)?.value).toBe(LDP.Resource);
-expect(metadata.get(POSIX.size)).toEqualRdfTerm(toLiteral('data'.length, XSD.terms.integer));
-expect(metadata.get(DC.modified)).toEqualRdfTerm(toLiteral(now.toISOString(), XSD.terms.dateTime));
-expect(metadata.get(POSIX.mtime)).toEqualRdfTerm(toLiteral(Math.floor(now.getTime() / 1000), XSD.terms.integer));
+expect(metadata.get(RDF.terms.type)?.value).toBe(LDP.Resource);
+expect(metadata.get(POSIX.terms.size)).toEqualRdfTerm(toLiteral('data'.length, XSD.terms.integer));
+expect(metadata.get(DC.terms.modified)).toEqualRdfTerm(toLiteral(now.toISOString(), XSD.terms.dateTime));
+expect(metadata.get(POSIX.terms.mtime)).toEqualRdfTerm(toLiteral(Math.floor(now.getTime() / 1000),
+XSD.terms.integer));
 // `dc:modified` is in the default graph
 expect(metadata.quads(null, null, null, SOLID_META.terms.ResponseMetadata)).toHaveLength(2);
 });
@@ -115,8 +118,8 @@ describe('A FileDataAccessor', (): void => {
 it('does not generate size metadata for a container.', async(): Promise<void> => {
 cache.data = { container: {}};
 metadata = await accessor.getMetadata({ path: `${base}container/` });
-expect(metadata.get(POSIX.size)).toBeUndefined();
-expect(metadata.get(DC.modified)).toEqualRdfTerm(toLiteral(now.toISOString(), XSD.terms.dateTime));
+expect(metadata.get(POSIX.terms.size)).toBeUndefined();
+expect(metadata.get(DC.terms.modified)).toEqualRdfTerm(toLiteral(now.toISOString(), XSD.terms.dateTime));
 });

 it('generates the metadata for a container.', async(): Promise<void> => {
@@ -130,12 +133,13 @@ describe('A FileDataAccessor', (): void => {
 };
 metadata = await accessor.getMetadata({ path: `${base}container/` });
 expect(metadata.identifier.value).toBe(`${base}container/`);
-expect(metadata.getAll(RDF.type)).toEqualRdfTermArray(
+expect(metadata.getAll(RDF.terms.type)).toEqualRdfTermArray(
 [ LDP.terms.Container, LDP.terms.BasicContainer, LDP.terms.Resource ],
 );
-expect(metadata.get(POSIX.size)).toBeUndefined();
-expect(metadata.get(DC.modified)).toEqualRdfTerm(toLiteral(now.toISOString(), XSD.terms.dateTime));
-expect(metadata.get(POSIX.mtime)).toEqualRdfTerm(toLiteral(Math.floor(now.getTime() / 1000), XSD.terms.integer));
+expect(metadata.get(POSIX.terms.size)).toBeUndefined();
+expect(metadata.get(DC.terms.modified)).toEqualRdfTerm(toLiteral(now.toISOString(), XSD.terms.dateTime));
+expect(metadata.get(POSIX.terms.mtime)).toEqualRdfTerm(toLiteral(Math.floor(now.getTime() / 1000),
+XSD.terms.integer));
 // `dc:modified` is in the default graph
 expect(metadata.quads(null, null, null, SOLID_META.terms.ResponseMetadata)).toHaveLength(1);
 });
@@ -169,7 +173,7 @@ describe('A FileDataAccessor', (): void => {

 // Containers
 for (const child of children.filter(({ identifier }): boolean => identifier.value.endsWith('/'))) {
-const types = child.getAll(RDF.type).map((term): string => term.value);
+const types = child.getAll(RDF.terms.type).map((term): string => term.value);
 expect(types).toContain(LDP.Resource);
 expect(types).toContain(LDP.Container);
 expect(types).toContain(LDP.BasicContainer);
@@ -177,7 +181,7 @@ describe('A FileDataAccessor', (): void => {

 // Documents
 for (const child of children.filter(({ identifier }): boolean => !identifier.value.endsWith('/'))) {
-const types = child.getAll(RDF.type).map((term): string => term.value);
+const types = child.getAll(RDF.terms.type).map((term): string => term.value);
 expect(types).toContain(LDP.Resource);
 expect(types).toContain('http://www.w3.org/ns/iana/media-types/application/octet-stream#Resource');
 expect(types).not.toContain(LDP.Container);
@@ -186,8 +190,8 @@ describe('A FileDataAccessor', (): void => {

 // All resources
 for (const child of children) {
-expect(child.get(DC.modified)).toEqualRdfTerm(toLiteral(now.toISOString(), XSD.terms.dateTime));
-expect(child.get(POSIX.mtime)).toEqualRdfTerm(toLiteral(Math.floor(now.getTime() / 1000),
+expect(child.get(DC.terms.modified)).toEqualRdfTerm(toLiteral(now.toISOString(), XSD.terms.dateTime));
+expect(child.get(POSIX.terms.mtime)).toEqualRdfTerm(toLiteral(Math.floor(now.getTime() / 1000),
 XSD.terms.integer));
 // `dc:modified` is in the default graph
 expect(child.quads(null, null, null, SOLID_META.terms.ResponseMetadata))
@@ -228,8 +232,8 @@ describe('A FileDataAccessor', (): void => {
 `${base}container/resource2`,
 ]));

-const types1 = children[0].getAll(RDF.type).map((term): string => term.value);
-const types2 = children[1].getAll(RDF.type).map((term): string => term.value);
+const types1 = children[0].getAll(RDF.terms.type).map((term): string => term.value);
+const types2 = children[1].getAll(RDF.terms.type).map((term): string => term.value);

 expect(types1).toContain('http://www.w3.org/ns/iana/media-types/application/octet-stream#Resource');
 for (const type of types2) {
@@ -279,7 +283,7 @@ describe('A FileDataAccessor', (): void => {
 });

 it('does not write metadata that is stored by the file system.', async(): Promise<void> => {
-metadata.add(RDF.type, LDP.terms.Resource);
+metadata.add(RDF.terms.type, LDP.terms.Resource);
 await expect(accessor.writeDocument({ path: `${base}resource` }, data, metadata)).resolves.toBeUndefined();
 expect(cache.data.resource).toBe('data');
 expect(cache.data['resource.meta']).toBeUndefined();
@@ -315,7 +319,7 @@ describe('A FileDataAccessor', (): void => {
 data.emit('error', new Error('error'));
 return null;
 };
-metadata.add('likes', 'apples');
+metadata.add(namedNode('likes'), 'apples');
 await expect(accessor.writeDocument({ path: `${base}resource` }, data, metadata))
 .rejects.toThrow('error');
 expect(cache.data['resource.meta']).toBeUndefined();
@@ -325,7 +329,7 @@ describe('A FileDataAccessor', (): void => {
 cache.data = { 'resource$.ttl': '<this> <is> <data>.', 'resource.meta': '<this> <is> <metadata>.' };
 metadata.identifier = DataFactory.namedNode(`${base}resource`);
 metadata.contentType = 'text/plain';
-metadata.add('new', 'metadata');
+metadata.add(namedNode('new'), 'metadata');
 await expect(accessor.writeDocument({ path: `${base}resource` }, data, metadata))
 .resolves.toBeUndefined();
 expect(cache.data).toEqual({
@@ -337,7 +341,7 @@ describe('A FileDataAccessor', (): void => {
 it('does not try to update the content-type if there is no original file.', async(): Promise<void> => {
 metadata.identifier = DataFactory.namedNode(`${base}resource.txt`);
 metadata.contentType = 'text/turtle';
-metadata.add('new', 'metadata');
+metadata.add(namedNode('new'), 'metadata');
 await expect(accessor.writeDocument({ path: `${base}resource.txt` }, data, metadata))
 .resolves.toBeUndefined();
 expect(cache.data).toEqual({

@@ -133,14 +133,14 @@ describe('An InMemoryDataAccessor', (): void => {
 )).resolves.toBeUndefined();

 const newMetadata = new RepresentationMetadata(inputMetadata);
-newMetadata.add(RDF.type, LDP.terms.BasicContainer);
+newMetadata.add(RDF.terms.type, LDP.terms.BasicContainer);
 await expect(accessor.writeContainer(identifier, newMetadata)).resolves.toBeUndefined();

 metadata = await accessor.getMetadata(identifier);
 expect(metadata.identifier.value).toBe(`${base}container/`);
 const quads = metadata.quads();
 expect(quads).toHaveLength(2);
-expect(metadata.getAll(RDF.type).map((term): string => term.value))
+expect(metadata.getAll(RDF.terms.type).map((term): string => term.value))
 .toEqual([ LDP.Container, LDP.BasicContainer ]);

 const children = [];
@@ -168,7 +168,7 @@ describe('An InMemoryDataAccessor', (): void => {
 expect(metadata.identifier.value).toBe(`${base}`);
 const quads = metadata.quads();
 expect(quads).toHaveLength(1);
-expect(metadata.getAll(RDF.type)).toHaveLength(1);
+expect(metadata.getAll(RDF.terms.type)).toHaveLength(1);

 const children = [];
 for await (const child of accessor.getChildren(identifier)) {

@@ -63,7 +63,7 @@ describe('A ConstantConverter', (): void => {

 it('does not support representations that are already in the right format.', async(): Promise<void> => {
 const preferences = { type: { 'text/html': 1 }};
-const metadata = new RepresentationMetadata({ contentType: 'text/html' });
+const metadata = new RepresentationMetadata({ [CONTENT_TYPE]: 'text/html' });
 const representation = { metadata } as any;
 const args = { identifier, representation, preferences };

@@ -101,7 +101,7 @@ describe('A ConstantConverter', (): void => {

 it('replaces the representation of a supported request.', async(): Promise<void> => {
 const preferences = { type: { 'text/html': 1 }};
-const metadata = new RepresentationMetadata({ contentType: 'text/turtle' });
+const metadata = new RepresentationMetadata({ [CONTENT_TYPE]: 'text/turtle' });
 const representation = { metadata, data: { destroy: jest.fn() }} as any;
 const args = { identifier, representation, preferences };

@@ -2,6 +2,7 @@ import 'jest-rdf';
 import { RepresentationMetadata } from '../../../../src/http/representation/RepresentationMetadata';
 import { ContentTypeReplacer } from '../../../../src/storage/conversion/ContentTypeReplacer';
 import { NotImplementedHttpError } from '../../../../src/util/errors/NotImplementedHttpError';
+import { CONTENT_TYPE } from '../../../../src/util/Vocabularies';

 const binary = true;
 const data = { data: true };
@@ -21,7 +22,7 @@ describe('A ContentTypeReplacer', (): void => {
 });

 it('throws on an unsupported input type.', async(): Promise<void> => {
-const metadata = new RepresentationMetadata({ contentType: 'text/plain' });
+const metadata = new RepresentationMetadata({ [CONTENT_TYPE]: 'text/plain' });
 const representation = { metadata };
 const preferences = { type: { 'application/json': 1 }};

@@ -31,7 +32,7 @@ describe('A ContentTypeReplacer', (): void => {
 });

 it('throws on an unsupported output type.', async(): Promise<void> => {
-const metadata = new RepresentationMetadata({ contentType: 'application/n-triples' });
+const metadata = new RepresentationMetadata({ [CONTENT_TYPE]: 'application/n-triples' });
 const representation = { metadata };
 const preferences = { type: { 'application/json': 1 }};

@@ -51,7 +52,7 @@ describe('A ContentTypeReplacer', (): void => {
 });

 it('replaces a supported content type when no preferences are given.', async(): Promise<void> => {
-const metadata = new RepresentationMetadata({ contentType: 'application/n-triples' });
+const metadata = new RepresentationMetadata({ [CONTENT_TYPE]: 'application/n-triples' });
 const representation = { binary, data, metadata };
 const preferences = {};

@@ -62,7 +63,7 @@ describe('A ContentTypeReplacer', (): void => {
 });

 it('replaces a supported content type when preferences are given.', async(): Promise<void> => {
-const metadata = new RepresentationMetadata({ contentType: 'application/n-triples' });
+const metadata = new RepresentationMetadata({ [CONTENT_TYPE]: 'application/n-triples' });
 const representation = { binary, data, metadata };
 const preferences = { type: { 'application/n-quads': 1 }};

@@ -73,7 +74,7 @@ describe('A ContentTypeReplacer', (): void => {
 });

 it('replaces a supported wildcard type.', async(): Promise<void> => {
-const metadata = new RepresentationMetadata({ contentType: 'text/plain' });
+const metadata = new RepresentationMetadata({ [CONTENT_TYPE]: 'text/plain' });
 const representation = { binary, data, metadata };
 const preferences = { type: { 'application/octet-stream': 1 }};

@@ -84,7 +85,7 @@ describe('A ContentTypeReplacer', (): void => {
 });

 it('picks the most preferred content type.', async(): Promise<void> => {
-const metadata = new RepresentationMetadata({ contentType: 'application/n-triples' });
+const metadata = new RepresentationMetadata({ [CONTENT_TYPE]: 'application/n-triples' });
 const representation = { binary, data, metadata };
 const preferences = { type: {
 'text/turtle': 0.5,