Skip to content
Draft
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
1 change: 1 addition & 0 deletions .eslintrc.json
Original file line number Diff line number Diff line change
Expand Up @@ -8,6 +8,7 @@
"node": true
},
"rules": {
"indent": ["error", 4],
"no-param-reassign": "off",
"no-console": "warn",
"no-plusplus": "off",
Expand Down
2 changes: 1 addition & 1 deletion README.md
Original file line number Diff line number Diff line change
Expand Up @@ -164,7 +164,7 @@ The `options` parameter is optional, and can define the following:
| --- | --- | --- |
|context | http://schema.org/ | The value in `@context`. It overwrites the one in the query.|
| sparqlFunction | `null` | A function that receives the transformed SPARQL query as input and returns a Promise. If not specified, the module performs the query on its own<sup id="a1">[1](#f1)</sup> against the specified endpoint. |
| endpoint | http://dbpedia.org/sparql | Used only if `sparqlFunction` is not specified. |
| endpoint | https://dbpedia.org/sparql | Used only if `sparqlFunction` is not specified. |
| debug | `false` | Enters debug mode. This allows printing the generated SPARQL query to the console. |
| params | `{}` | Additional parameters to pass to the HTTP query |

Expand Down
2 changes: 1 addition & 1 deletion motivation.md
Original file line number Diff line number Diff line change
Expand Up @@ -17,7 +17,7 @@ WHERE {
rdfs:label ?name .
} LIMIT 100
```
Extract of the [json results](http://dbpedia.org/sparql?default-graph-uri=http%3A%2F%2Fdbpedia.org&query=SELECT+*%0D%0AWHERE+%7B%0D%0A+%3Fcity+a+%3Chttp%3A%2F%2Fdbpedia.org%2Fontology%2FCity%3E+%3B%0D%0A++++++dbo%3Acountry+dbr%3AItaly+%3B%0D%0A++++++foaf%3Adepiction+%3Fimage+%3B%0D%0A++++++rdfs%3Alabel+%3Fname+.%0D%0A%7D+LIMIT+100&format=text%2Fhtml&CXML_redir_for_subjs=121&CXML_redir_for_hrefs=&timeout=30000&debug=on&run=+Run+Query+):
Extract of the [json results](https://dbpedia.org/sparql?default-graph-uri=http%3A%2F%2Fdbpedia.org&query=SELECT+*%0D%0AWHERE+%7B%0D%0A+%3Fcity+a+%3Chttp%3A%2F%2Fdbpedia.org%2Fontology%2FCity%3E+%3B%0D%0A++++++dbo%3Acountry+dbr%3AItaly+%3B%0D%0A++++++foaf%3Adepiction+%3Fimage+%3B%0D%0A++++++rdfs%3Alabel+%3Fname+.%0D%0A%7D+LIMIT+100&format=text%2Fhtml&CXML_redir_for_subjs=121&CXML_redir_for_hrefs=&timeout=30000&debug=on&run=+Run+Query+):

```json
{
Expand Down
6,555 changes: 1,476 additions & 5,079 deletions package-lock.json

Large diffs are not rendered by default.

23 changes: 8 additions & 15 deletions package.json
Original file line number Diff line number Diff line change
Expand Up @@ -6,13 +6,16 @@
"type": "module",
"module": "src/main.mjs",
"browser": "dist/browser.mjs",
"engines": {
"node": ">=18.0.0"
},
"files": [
"**/*.mjs",
"*.js"
],
"repository": "https://github.com/D2KLab/sparql-transformer",
"scripts": {
"test": " DEBUG_LEVEL=debug ava",
"test": "DEBUG_LEVEL=debug node --test test.js",
"prepublishOnly": "rollup -c",
"postpublish": "git push --follow-tags"
},
Expand All @@ -23,27 +26,17 @@
],
"author": "Pasquale Lisena <pasquale.lisena@eurecom.fr> (http://pasqlisena.github.io/)",
"license": "Apache-2.0",
"ava": {
"require": [
"esm"
]
},
"devDependencies": {
"ava": "^5.2.0",
"eslint": "^8.34.0",
"eslint": "^8.57.0",
"eslint-config-airbnb-base": "^15.0.0",
"eslint-plugin-import": "^2.27.5",
"eslint-plugin-import": "^2.29.1",
"esm": "^3.2.25",
"nock": "^13.3.0",
"rimraf": "^4.1.2",
"rollup": "^3.17.2",
"rollup": "^4.21.1",
"rollup-plugin-cleanup": "^3.2.1"
},
"dependencies": {
"axios": "^1.3.3",
"fast-deep-equal": "^3.1.3",
"is-valid-path": "^0.1.1",
"jsonfile": "^6.1.0",
"object-assign-deep": "^0.4.0"
}
}
}
1 change: 0 additions & 1 deletion rollup.config.js
Original file line number Diff line number Diff line change
Expand Up @@ -9,7 +9,6 @@ export default {
external: [
'fast-deep-equal',
'object-assign-deep',
'axios',
],
plugins: [cleanup({
extensions: ['js', 'mjs'],
Expand Down
2 changes: 1 addition & 1 deletion src/main.mjs
Original file line number Diff line number Diff line change
Expand Up @@ -7,7 +7,7 @@ const debug = new Debugger();
const INDENT = ' ';
const DEFAULT_OPTIONS = {
context: 'http://schema.org/',
endpoint: 'http://dbpedia.org/sparql',
endpoint: 'https://dbpedia.org/sparql',
langTag: 'show',
};

Expand Down
11 changes: 6 additions & 5 deletions src/node_main.mjs
Original file line number Diff line number Diff line change
@@ -1,11 +1,12 @@
import isValidPath from 'is-valid-path';
import jsonfile from 'jsonfile';
import fs from 'fs';

import sparqlTransformer from './main.mjs';

export default function(input, options = {}) {
if (isValidPath(input)) input = jsonfile.readFileSync(input);
export default function (input, options = {}) {
if (fs.existsSync(input) && fs.lstatSync(input).isFile()) {
input = JSON.parse(fs.readFileSync(input, 'utf8'));
}

options.env = process && process.env;
return sparqlTransformer(input, options);
}
}
22 changes: 17 additions & 5 deletions src/sparql-client.mjs
Original file line number Diff line number Diff line change
@@ -1,6 +1,4 @@
/* Minimal SPARQL client */
import axios from 'axios';

function validURL(str) {
const pattern = new RegExp('^(https?:\\/\\/)?' // protocol
+
Expand All @@ -24,9 +22,23 @@ export default class SparqlClient {
}

query(q, params = {}) {
return axios.post(this.endpoint, new URLSearchParams({...params, query: q })).then((res) => {
if (Math.floor(res.status / 100) == 2) return res.data; // all 2xx status (200, 206, ...)
throw new Error(res.statusText);
// Query to the SPARQL endpoint
return fetch(this.endpoint, {
method: 'POST',
headers: {
'Accept': 'application/sparql-results+json',
'Content-Type': 'application/x-www-form-urlencoded',
},
body: new URLSearchParams({
...params,
query: q,
}),
}).then((response) => {
if (!response.ok) {
throw new Error(response.statusText);
}

return response.json();
});
}
}
110 changes: 46 additions & 64 deletions test.js
Original file line number Diff line number Diff line change
@@ -1,7 +1,7 @@
import test from 'ava';
import fs from 'fs'
import fs from 'fs';
import path from 'path';
import nock from 'nock';
import { test, mock } from 'node:test';
import assert from 'node:assert';

import * as lib from './src/node_main.mjs';

Expand All @@ -12,19 +12,19 @@ const JSONLD_QUERIES = './examples/json_queries/';
const SPARQL_QUERIES = './examples/sparql_queries/';
const SPARQL_OUTPUTS = './examples/sparql_output/';

function mock(file) {
nock('http://dbpedia.org')
.post('/sparql')
.query(true)
.reply(200, file);
function mockFetch(file) {
mock.method(global, 'fetch', () => Promise.resolve({
ok: true,
json: () => Promise.resolve(JSON.parse(file)),
}));
}

async function getSparqlQuery(q) {
let sparqlQuery = null;
try {
await sparqlTransformer(q, {
debug: false,
sparqlFunction: async(query) => {
sparqlFunction: async (query) => {
sparqlQuery = ` ${query.trim()}`;
return Promise.reject();
},
Expand All @@ -44,128 +44,110 @@ function loadFiles(file) {
return [orig, q, sparql, expected];
}

test('DBpedia list of cities (proto)', async(t) => {
test('DBpedia list of cities (proto)', async (t) => {
const file = 'city.list.json';
const [orig, q, sparql, expected] = loadFiles(file);
mock(orig);
mockFetch(orig);

const outSparql = await getSparqlQuery(q);
t.deepEqual(outSparql, sparql);
assert.deepStrictEqual(outSparql, sparql);

const out = await sparqlTransformer(q);
// fs.writeFileSync('a.json', JSON.stringify(out, null, 2), 'utf-8');

t.deepEqual(out, expected);
assert.deepStrictEqual(out, expected);
});

test('DBpedia list of cities and regions (jsonld)', async(t) => {
test('DBpedia list of cities and regions (jsonld)', async () => {
const file = 'city.region.list.ld.json';
const [orig, q, sparql, expected] = loadFiles(file);
mock(orig);
mockFetch(orig);

const outSparql = await getSparqlQuery(q);
t.deepEqual(outSparql, sparql);
assert.deepStrictEqual(outSparql, sparql);

const out = await sparqlTransformer(q);
// fs.writeFileSync('a.json', JSON.stringify(out, null, 2), 'utf-8');

t.deepEqual(out, expected);
assert.deepStrictEqual(out, expected);
});

test('DBpedia grunge bands', async(t) => {
test('DBpedia grunge bands', async () => {
const file = 'band.json';
const [orig, q, sparql, expected] = loadFiles(file);
mock(orig);
mockFetch(orig);

const outSparql = await getSparqlQuery(q);
t.deepEqual(outSparql, sparql);
assert.deepStrictEqual(outSparql, sparql);

const out = await sparqlTransformer(q);
// fs.writeFileSync('a.json', JSON.stringify(out, null, 2), 'utf-8');

t.deepEqual(out, expected);
assert.deepStrictEqual(out, expected);
});

test('DBpedia genres with bands', async(t) => {
test('DBpedia genres with bands', async () => {
const file = 'band_reversed.json';
const [orig, q, sparql, expected] = loadFiles(file);
mock(orig);
mockFetch(orig);

const outSparql = await getSparqlQuery(q);
t.deepEqual(outSparql.trim(), sparql.trim());
assert.deepStrictEqual(outSparql.trim(), sparql.trim());

const out = await sparqlTransformer(q);
// fs.writeFileSync('a.json', JSON.stringify(out, null, 2), 'utf-8');

t.deepEqual(out, expected);
assert.deepStrictEqual(out, expected);
});

test('Aggregates', async(t) => {
test('Aggregates', async () => {
const file = 'aggregates.json';
const [orig, q, sparql, expected] = loadFiles(file);
mock(orig);
mockFetch(orig);

const outSparql = await getSparqlQuery(q);
t.deepEqual(outSparql.trim(), sparql.trim());
assert.deepStrictEqual(outSparql.trim(), sparql.trim());

const out = await sparqlTransformer(q);
// fs.writeFileSync('a.json', JSON.stringify(out, null, 2), 'utf-8');

t.deepEqual(out, expected);
assert.deepStrictEqual(out, expected);
});

test('No lang tag', async(t) => {
test('No lang tag', async () => {
const file = 'city.list.ld.json';
const [orig, q, sparql, expected] = loadFiles(file);
mock(orig);
mockFetch(orig);

const outSparql = await getSparqlQuery(q);
t.deepEqual(outSparql, sparql);
assert.deepStrictEqual(outSparql, sparql);

const out = await sparqlTransformer(q);
// fs.writeFileSync('a.json', JSON.stringify(out, null, 2), 'utf-8');

t.deepEqual(out, expected);
assert.deepStrictEqual(out, expected);
});

test('Duplicate variable name', async(t) => {
test('Duplicate variable name', async () => {
const file = 'issue_10_duplicate_vars.json';
const [orig, q, sparql, expected] = loadFiles(file);
mock(orig);
mockFetch(orig);

const outSparql = await getSparqlQuery(q);
t.deepEqual(outSparql, sparql);
assert.deepStrictEqual(outSparql, sparql);

const out = await sparqlTransformer(q);
// fs.writeFileSync('a.json', JSON.stringify(out, null, 2), 'utf-8');

t.deepEqual(out, expected);
assert.deepStrictEqual(out, expected);
});

test('List-required fieds', async(t) => {
test('List-required fields', async () => {
const file = 'band_forcelist.json';
const [orig, q, sparql, expected] = loadFiles(file);
mock(orig);
mockFetch(orig);

const outSparql = await getSparqlQuery(q);
t.deepEqual(outSparql, sparql);
assert.deepStrictEqual(outSparql, sparql);

const out = await sparqlTransformer(q);
// fs.writeFileSync('a.json', JSON.stringify(out, null, 2), 'utf-8');

t.deepEqual(out, expected);
assert.deepStrictEqual(out, expected);
});

test('Library limit', async(t) => {
test('Library limit', async () => {
const file = 'band.liblimit.json';
const [orig, q, sparql, expected] = loadFiles(file);
mock(orig);
mockFetch(orig);

const outSparql = await getSparqlQuery(q);
t.deepEqual(outSparql.trim(), sparql.trim());
assert.deepStrictEqual(outSparql.trim(), sparql.trim());

const out = await sparqlTransformer(q);
// fs.writeFileSync('a.json', JSON.stringify(out, null, 2), 'utf-8');

t.deepEqual(out, expected);
});
assert.deepStrictEqual(out, expected);
});