Skip to content

Commit

Permalink
allow import of CSV files
Browse files Browse the repository at this point in the history
  • Loading branch information
ali1k committed Jan 9, 2019
1 parent 4d4c54b commit 750ac29
Show file tree
Hide file tree
Showing 5 changed files with 131 additions and 7 deletions.
16 changes: 16 additions & 0 deletions actions/importCSV.js
Original file line number Diff line number Diff line change
@@ -0,0 +1,16 @@
import {navigateAction} from 'fluxible-router';
/**
 * Fluxible action: imports a CSV resource into the triple store through the
 * 'import.jsonld' service and, on success, navigates to the created dataset.
 * @param {Object} context - fluxible action context (service, dispatch, executeAction)
 * @param {Object} payload - service payload (e.g. resourceURI, importMethod)
 * @param {Function} done - fluxible completion callback, invoked on both paths
 */
export default function importCSV(context, payload, done) {
    //imports can be slow: allow a two-minute service timeout
    const options = {timeout: 120 * 1000};
    context.service.create('import.jsonld', payload, options, (error, result) => {
        if (error) {
            context.dispatch('IMPORT_CSV_FAILURE', error);
            done();
            return;
        }
        context.dispatch('IMPORT_CSV_SUCCESS', result);
        //jump to the page of the newly created dataset
        context.executeAction(navigateAction, {
            url: '/dataset/1/' + encodeURIComponent(result.datasetURI)
        });
        done();
    });
}
11 changes: 10 additions & 1 deletion components/resource/CSVMappingResource.js
Original file line number Diff line number Diff line change
Expand Up @@ -6,6 +6,7 @@ import URIUtil from '../utils/URIUtil';
import {connectToStores} from 'fluxible-addons-react';
import cloneResource from '../../actions/cloneResource';
import createJSONLD from '../../actions/createJSONLD';
import importCSV from '../../actions/importCSV';
import ImportStore from '../../stores/ImportStore';
import WaitAMoment from '../WaitAMoment';

Expand Down Expand Up @@ -36,6 +37,13 @@ class CSVMappingResource extends React.Component {
});
e.stopPropagation();
}
handleImportCSV(resourceURI, e) {
this.context.executeAction(importCSV, {
resourceURI: resourceURI,
importMethod: 'batchInsert'
});
e.stopPropagation();
}
render() {
//check erros first
if(this.props.error){
Expand Down Expand Up @@ -197,7 +205,8 @@ class CSVMappingResource extends React.Component {
: ''}
</h2>
{mainDIV}
{allowJSONLD ? <div className="ui big primary button" onClick={this.handleCreateJSONLD.bind(this, decodeURIComponent(this.props.resource))}>Create JSON-LD</div>: null}
{allowJSONLD ? <div className="ui big primary button" onClick={this.handleImportCSV.bind(this, decodeURIComponent(this.props.resource))}>Import Data</div>: null}
{allowJSONLD ? <div className="ui big button" onClick={this.handleCreateJSONLD.bind(this, decodeURIComponent(this.props.resource))}>Export Data as JSON-LD</div>: null}
</div>
: null
}
Expand Down
5 changes: 5 additions & 0 deletions plugins/import/CSVMapper.js
Original file line number Diff line number Diff line change
Expand Up @@ -138,14 +138,18 @@ class CSVMapper {
}
let resourcePrefix = baseResourceDomain[0] + '/r/';
let vocabPrefix = baseResourceDomain[0] + '/v/';
let datasetURI = baseResourceDomain[0] + '/dataset/';
let rnc = resourcePrefix + Math.round(+new Date() / 1000);
//do not add two slashes
if(baseResourceDomain[0].slice(-1) === '/'){
resourcePrefix = baseResourceDomain[0] + 'r/';
vocabPrefix = baseResourceDomain[0] + 'v/';
datasetURI = baseResourceDomain[0] + 'dataset/';
rnc = resourcePrefix + Math.round(+new Date() / 1000);
}
let cmRND = 'cm' + Math.round(+new Date() / 1000);
//create a dataset if not given
datasetURI = options.datasetURI ? options.datasetURI : datasetURI + Math.round(+new Date() / 1000);
let date = new Date();
let currentDate = date.toISOString(); //"2011-12-19T15:28:46.493Z"
//query the triple store for adding configs
Expand Down Expand Up @@ -177,6 +181,7 @@ class CSVMapper {
ldr:customMappings r:${cmRND};
ldr:resourcePrefix <${resourcePrefix}>;
ldr:vocabPrefix <${vocabPrefix}>;
ldr:dataset <${datasetURI}>;
${userSt}
ldr:createdOn "${currentDate}"^^xsd:dateTime .
r:${cmRND} ${customMappings.join(' ')}
Expand Down
49 changes: 44 additions & 5 deletions services/import.js
Original file line number Diff line number Diff line change
Expand Up @@ -110,6 +110,17 @@ export default {
callback(null, {r: res, d: mappingsDatasetURI[0]});
});
} else if (resource === 'import.jsonld') {
//control access on authentication
if(enableAuthentication){
if(!req.user){
callback(null, {output: ''});
return 0;
}else{
user = req.user;
}
}else{
user = {accountName: 'open'};
}
//generate and upload the JSON-LD file from CSV config
getJSONLDConfig(params.resourceURI, {}, (res)=>{
//console.log(res);
Expand Down Expand Up @@ -219,7 +230,7 @@ export default {
if(!prop.trim()){
continue;
}
if(prop.toLowerCase() === contextOptions['idColumn'].toLowerCase()){
if(contextOptions['idColumn'] && prop.toLowerCase() === contextOptions['idColumn'].toLowerCase()){
tmpObj['@id'] = 'r:' + encodeURIComponent(camelCase(data[prop]));
}
if(contextOptions['skippedColumns'].indexOf(camelCase(prop)) === -1){
Expand All @@ -230,6 +241,10 @@ export default {
}
}
}
//add a random ID if no ID column is specified
if(!contextOptions['idColumn'] ){
tmpObj['@id'] = 'r:' + counter+ '_' + Math.round(+new Date() / 1000);
}
graphArr.push(tmpObj);
//console.log(data);
})
Expand All @@ -246,11 +261,35 @@ export default {
'@context': contextObj,
'@graph': graphArr
};
fs.writeFile(jsonPath, JSON.stringify(jsonLD), function(err, data){
if (err) console.log(err);
callback(null, {output: '/' + uploadFolder[0]+ '/' + jsonFileName});
});
if(params.importMethod){
//todo: handle different import methods
//default method: one big INSERT query
//console.log(jsonLD);
let importDatasetURI = res.dataset;
if(!importDatasetURI){
importDatasetURI = baseResourceDomain[0] +'/dataset/' + Math.round(+new Date() / 1000);
}
getDynamicEndpointParameters(user, importDatasetURI, (endpointParameters)=>{
graphName = endpointParameters.graphName;
let query = queryObject.csvBatchInsert(endpointParameters, user, graphName, jsonLD);
//console.log(query);
//build http uri
//send request
HTTPQueryObject = getHTTPQuery('update', query, endpointParameters, outputFormat);
rp.post({uri: HTTPQueryObject.uri, form: HTTPQueryObject.params}).then(function(res){
callback(null, {datasetURI: importDatasetURI});
}).catch(function (err) {
console.log(err);
callback(null, {datasetURI: importDatasetURI});
});

});
}else{
fs.writeFile(jsonPath, JSON.stringify(jsonLD), function(err, data){
if (err) console.log(err);
callback(null, {output: '/' + uploadFolder[0]+ '/' + jsonFileName});
});
}
});

let counter = 0;
Expand Down
57 changes: 56 additions & 1 deletion services/sparql/ImportQuery.js
Original file line number Diff line number Diff line change
@@ -1,4 +1,5 @@
'use strict';
import validUrl from 'valid-url';
class ImportQuery{
constructor() {
this.prefixes = `
Expand All @@ -15,6 +16,60 @@ class ImportQuery{
`;
this.query='';
}

/**
 * Returns the SPARQL PREFIX declarations configured in the constructor.
 * @returns {String} prefix declaration block
 */
getPrefixes() {
return this.prefixes;
}
prepareGraphName(graphName){
let gStart = 'GRAPH <'+ graphName +'> { ';
let gEnd = ' } ';
if(!graphName || graphName === 'default'){
gStart =' ';
gEnd = ' ';
}
return {gStart: gStart, gEnd: gEnd}
}
csvBatchInsert(endpointParameters, user, graphName, jsonld) {
//todo: consider different value types
let {gStart, gEnd} = this.prepareGraphName(graphName);
let userSt = '';
if(user && user.accountName !== 'open' && !parseInt(user.isSuperUser)){
userSt=` ldr:createdBy <${user.id}> ;`;
}
let date = new Date();
let currentDate = date.toISOString(); //"2011-12-19T15:28:46.493Z"
jsonld['@graph'].forEach((node, index)=>{
let propsSt = '';
for(let prop in node){
if(prop !== '@type' && prop !=='@id'){
propsSt = propsSt + `${validUrl.is_web_uri(prop) ? '<'+prop+'>': prop} ${validUrl.is_web_uri(node[prop]) ? '<'+node[prop]+'>': '"""'+node[prop]+'"""'} ; `;
}
}
this.query = this.query + `
INSERT DATA {
${gStart}
${node['@id']} a ${node['@type']} ;
${propsSt}
${userSt}
ldr:createdOn "${currentDate}"^^xsd:dateTime .
${gEnd}
};
`;
});
//add prefixes
let prefixes = '';
//add ldr prefix
if(!jsonld['@context']['ldr']){
prefixes = 'PREFIX ldr: <https://github.com/ali1k/ld-reactor/blob/master/vocabulary/index.ttl#>';
}
for(let prop in jsonld['@context']){
let val = jsonld['@context'][prop];
if((typeof val) === 'string'){
prefixes = prefixes + `
PREFIX ${prop}: <${val}>
`;
}
}
return prefixes + this.query;
}
}
export default ImportQuery;

0 comments on commit 750ac29

Please sign in to comment.