This repository was archived by the owner on Aug 29, 2025. It is now read-only.
Merged
14 changes: 0 additions & 14 deletions NEW_CONNECTION.md
@@ -33,20 +33,6 @@ The following are the instructions for updating the Tab React JS file
....
```

## Updating the Settings.react.js
Update the fetchData function to make calls to the appropriate sessions.js backend.
Add your new connection to the following list:

```javascript
if (contains(connectionObject.dialect, [
DIALECTS.APACHE_IMPALA,
DIALECTS.APACHE_SPARK,
DIALECTS.IBM_DB2,
DIALECTS.MYSQL, DIALECTS.MARIADB, DIALECTS.POSTGRES,
DIALECTS.REDSHIFT, DIALECTS.MSSQL, DIALECTS.SQLITE
])) {
```

## New Datastore file
This file should be added in the following location: `backend/persistent/datastores`
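
For orientation, the connectors in that directory all export the same promise-based functions (`connect`, `tables`, `schemas`, `query`) that `Datastores.js` dispatches to, and the `csv.js` file added in this PR follows the same shape. A minimal sketch — the table and column values here are placeholders, not a real connector:

```javascript
// Hypothetical skeleton of a new datastore module in backend/persistent/datastores.
// The function names match the interface used by Datastores.js and csv.js below.

export function connect(connection) {
    // Validate the connection object and resolve it once the datastore is reachable.
    return Promise.resolve(connection);
}

export function tables() {
    // Resolve to the list of table names shown in the query editor.
    return Promise.resolve(['example_table']);
}

export function schemas(connection) {
    // One row per column: [table name, column name, type name].
    return Promise.resolve({
        columnnames: ['TABNAME', 'COLNAME', 'TYPENAME'],
        rows: [['example_table', 'id', 'Number']]
    });
}

export function query(queryString, connection) {
    // Resolve to {columnnames, rows} for the given query string.
    return Promise.resolve({
        columnnames: ['id'],
        rows: [[1]]
    });
}
```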
9 changes: 1 addition & 8 deletions app/components/Settings/Settings.react.js
@@ -194,14 +194,7 @@ class Settings extends Component {
}

const connectionObject = connections[selectedTab] || {};
if (contains(connectionObject.dialect, [
DIALECTS.APACHE_IMPALA,
DIALECTS.APACHE_SPARK,
DIALECTS.IBM_DB2,
DIALECTS.MYSQL, DIALECTS.MARIADB, DIALECTS.POSTGRES,
DIALECTS.REDSHIFT, DIALECTS.MSSQL, DIALECTS.SQLITE,
DIALECTS.DATA_WORLD
])) {
if (contains(connectionObject.dialect, SQL_DIALECTS_USING_EDITOR)) {
if (connectRequest.status === 200 && !tablesRequest.status) {
this.setState({editMode: false});
getTables();
2 changes: 2 additions & 0 deletions app/components/Settings/Tabs/Tab.react.js
@@ -32,6 +32,8 @@ export default class ConnectionTab extends Component {
label = `Apache Impala (${connectionObject.host}:${connectionObject.port})`;
} else if (dialect === DIALECTS.APACHE_SPARK) {
label = `Apache Spark (${connectionObject.host}:${connectionObject.port})`;
} else if (connectionObject.dialect === DIALECTS.CSV) {
label = `CSV (${connectionObject.database})`;
} else if (connectionObject.dialect === DIALECTS.ELASTICSEARCH) {
label = `Elasticsearch (${connectionObject.host})`;
} else if (connectionObject.dialect === DIALECTS.SQLITE) {
16 changes: 14 additions & 2 deletions app/constants/constants.js
@@ -15,7 +15,8 @@ export const DIALECTS = {
APACHE_SPARK: 'apache spark',
APACHE_IMPALA: 'apache impala',
APACHE_DRILL: 'apache drill',
DATA_WORLD: 'data.world'
DATA_WORLD: 'data.world',
CSV: 'csv'
};

export const SQL_DIALECTS_USING_EDITOR = [
@@ -28,7 +29,8 @@ export const SQL_DIALECTS_USING_EDITOR = [
'ibm db2',
'apache spark',
'apache impala',
'data.world'
'data.world',
'csv'
];

const commonSqlOptions = [
@@ -74,6 +76,9 @@ const hadoopQLOptions = [
export const CONNECTION_CONFIG = {
[DIALECTS.APACHE_IMPALA]: hadoopQLOptions,
[DIALECTS.APACHE_SPARK]: hadoopQLOptions,
[DIALECTS.CSV]: [
{'label': 'URL to CSV File', 'value': 'database', 'type': 'text'}
],
[DIALECTS.IBM_DB2]: commonSqlOptions,
[DIALECTS.MYSQL]: commonSqlOptions,
[DIALECTS.MARIADB]: commonSqlOptions,
@@ -201,6 +206,7 @@ export const CONNECTION_CONFIG = {
export const LOGOS = {
[DIALECTS.APACHE_SPARK]: 'images/spark-logo.png',
[DIALECTS.APACHE_IMPALA]: 'images/impala-logo.png',
[DIALECTS.CSV]: 'images/csv-logo.png',
[DIALECTS.IBM_DB2]: 'images/ibmdb2-logo.png',
[DIALECTS.REDSHIFT]: 'images/redshift-logo.png',
[DIALECTS.POSTGRES]: 'images/postgres-logo.png',
@@ -216,6 +222,8 @@ export const LOGOS = {

export function PREVIEW_QUERY(connection, table, elasticsearchIndex) {
switch (connection.dialect) {
case DIALECTS.CSV:
return 'SELECT TOP 1000 * FROM ?';
case DIALECTS.IBM_DB2:
return `SELECT * FROM ${table} FETCH FIRST 1000 ROWS ONLY`;
case DIALECTS.APACHE_IMPALA:
@@ -324,6 +332,10 @@ export const SAMPLE_DBS = {
host: 'spark.test.plotly.host',
dialect: DIALECTS.APACHE_SPARK
},
[DIALECTS.CSV]: {
dialect: DIALECTS.CSV,
database: 'http://www.fdic.gov/bank/individual/failed/banklist.csv'
},
[DIALECTS.IBM_DB2]: {
username: 'db2user1',
password: 'w8wfy99DvEmgkBsE',
Binary file added app/images/csv-logo.png
3 changes: 3 additions & 0 deletions backend/persistent/datastores/Datastores.js
@@ -5,6 +5,7 @@ import * as ApacheDrill from './ApacheDrill';
import * as IbmDb2 from './ibmdb2';
import * as ApacheLivy from './livy';
import * as ApacheImpala from './impala';
import * as CSV from './csv';
import * as DataWorld from './dataworld';
import * as DatastoreMock from './datastoremock';

@@ -44,6 +45,8 @@ function getDatastoreClient(connection) {
return ApacheLivy;
} else if (dialect === 'apache impala') {
return ApacheImpala;
} else if (dialect === 'csv') {
return CSV;
} else if (dialect === 'ibm db2') {
return IbmDb2;
} else if (dialect === 'data.world') {
145 changes: 145 additions & 0 deletions backend/persistent/datastores/csv.js
@@ -0,0 +1,145 @@
const alasql = require('alasql');
import fetch from 'node-fetch';
import papa from 'papaparse';
import {type} from 'ramda';

import {parseSQL} from '../../parse';

/**
* @typedef {object} PapaError Papaparse error
*
* @property {string} type Error type
* @property {string} code Error code
* @property {string} message Error description
* @property {number} row Row index that triggered the error
*/

/**
* Error thrown by CSV connector
* @class
* @param {string} url URL of the CSV file that triggered the error
* @param {PapaError[]} errors List of errors returned by Papaparse
*/
export function CSVError(url, errors) {
/**
* Error class
* @type {string}
*/
this.name = 'CSVError';

/**
* Error description
* @type {string}
*/
this.message = 'Failed to parse CSV file ' + url;

if (Error.captureStackTrace) {
Error.captureStackTrace(this, CSVError);
} else {
/**
* Error stack trace
*/
this.stack = new Error(this.message).stack;
}

/**
* URL to CSV file
* @type {string}
*/
this.url = url;

/**
* List of errors returned by Papaparse
* @type {PapaError[]}
*/
this.errors = errors;

if (errors && errors[0] && errors[0].message) {
this.message = errors[0].message;
}
}
CSVError.prototype = Object.create(Error.prototype);
CSVError.prototype.constructor = CSVError;

/**
* Store of CSV files parsed into JS objects and indexed by URL
*
* @const {Object.<string, object>}
*/
const connectionData = {};
function getData(connection) {
return connectionData[connection.database];
}
function putData(connection, data) {
connectionData[connection.database] = data;
}

export function connect(connection) {
Contributor: If the URL is bad, how will this be presented to the user on the Settings UI? Will fetch throw an error that is gracefully passed on to the UI on its own?

Contributor Author: Same behaviour as all the other connectors: fetch throws, routes.js catches the exception and replies to the connection request with a 500 status and the body {error: {message: error.message}}.
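
For illustration only, a sketch of the error path described above — the actual routes.js handler is not part of this diff, and the handler name here is hypothetical:

```javascript
// Hypothetical sketch, not the actual routes.js code. Assumes:
//   import {connect} from './persistent/datastores/Datastores';
// Any rejection from connect() (a failed fetch, a CSVError, ...) becomes a
// 500 response whose body carries the error message back to the Settings UI.
function handleConnectionRequest(req, res) {
    return connect(req.params)
        .then(connection => res.json(200, connection))
        .catch(error => res.json(500, {error: {message: error.message}}));
}
```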

const url = connection.database;

return fetch(url)
.then(res => res.text())
.then(body => {
return new Promise(function(resolve) {
papa.parse(body, {
download: false,
dynamicTyping: true,
skipEmptyLines: true,
header: true,
worker: true,

complete: function({data, errors, meta}) {
if (errors.length) {
throw new CSVError(url, errors);
}

connection.meta = meta;

putData(connection, data);

resolve(connection);
}
});
});
});
}

/**
* Table name used in SQL queries to refer to the data imported from a CSV file,
* so that we can take advantage of alaSQL's parser.
* @const {string}
*/
const TABLENAME = '?';

export function tables() {
return Promise.resolve([TABLENAME]);
}

export function schemas(connection) {
const columnnames = ['TABNAME', 'COLNAME', 'TYPENAME'];
const rows = connection.meta.fields.map(columnName => {
return [TABLENAME, columnName, getType(columnName)];
});

return Promise.resolve({columnnames, rows});

function getType(columnName) {
const data = getData(connection);

for (let i = 0; i < data.length; i++) {
const cell = data[i][columnName];
if (cell) return type(cell);
}

// If we reach this point, the column is empty.
// Let's return 'String', as none of the cells can be converted to Number.
return 'String';
}
}

export function query(queryString, connection) {
const data = getData(connection);

// In the query `SELECT * FROM ?`, alaSQL replaces ? with data
return alasql.promise(queryString, [data]).then(parseSQL);
}
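
Putting the new connector together, a rough usage sketch — the import path and sample URL mirror this PR's code, and `SELECT TOP 5` is just an example of a query alasql accepts (the PR's own preview query uses `SELECT TOP 1000 * FROM ?`):

```javascript
import {connect, tables, schemas, query} from './backend/persistent/datastores/csv';

const connection = {
    dialect: 'csv',
    // Sample URL taken from SAMPLE_DBS in app/constants/constants.js
    database: 'http://www.fdic.gov/bank/individual/failed/banklist.csv'
};

connect(connection)
    .then(() => tables(connection))                  // -> ['?']
    .then(() => schemas(connection))                 // -> {columnnames, rows} describing each column
    .then(() => query('SELECT TOP 5 * FROM ?', connection))
    .then(({columnnames, rows}) => {
        console.log(columnnames, rows);              // first five parsed rows
    })
    .catch(error => console.error(error.message));   // e.g. a CSVError or a failed fetch
```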
4 changes: 3 additions & 1 deletion package.json
@@ -28,6 +28,7 @@
"test-unit-all-watch": "cross-env NODE_ENV=test BABEL_DISABLE_CACHE=1 electron-mocha --bail --full-trace --timeout 90000 --compilers js:babel-register --recursive test/**/*.spec.js --watch",
"test-unit-watch": "cross-env NODE_ENV=test BABEL_DISABLE_CACHE=1 electron-mocha --bail --full-trace --timeout 90000 --watch --compilers js:babel-register ",
"test-unit-certificates": "cross-env NODE_ENV=test BABEL_DISABLE_CACHE=1 electron-mocha --full-trace --timeout 90000 --compilers js:babel-register test/backend/certificates.spec.js",
"test-unit-csv": "cross-env NODE_ENV=test BABEL_DISABLE_CACHE=1 electron-mocha --full-trace --timeout 90000 --compilers js:babel-register test/backend/datastores.csv.spec.js",
"test-unit-datastores": "cross-env NODE_ENV=test BABEL_DISABLE_CACHE=1 electron-mocha --full-trace --timeout 90000 --compilers js:babel-register test/backend/Datastores.spec.js",
"test-unit-dataworld": "cross-env NODE_ENV=test BABEL_DISABLE_CACHE=1 electron-mocha --full-trace --timeout 90000 --compilers js:babel-register test/backend/datastores.dataworld.spec.js",
"test-unit-elasticsearch": "cross-env NODE_ENV=test BABEL_DISABLE_CACHE=1 electron-mocha --full-trace --timeout 90000 --compilers js:babel-register test/backend/datastores.elasticsearch.spec.js",
@@ -36,7 +37,6 @@
"test-unit-ibmdb": "cross-env NODE_ENV=test BABEL_DISABLE_CACHE=1 electron-mocha --full-trace --timeout 90000 --compilers js:babel-register test/backend/datastores.ibmdb.spec.js",
"test-unit-impala": "cross-env NODE_ENV=test BABEL_DISABLE_CACHE=1 electron-mocha --full-trace --timeout 90000 --compilers js:babel-register test/backend/datastores.impala.spec.js",
"test-unit-livy": "cross-env NODE_ENV=test BABEL_DISABLE_CACHE=1 electron-mocha --full-trace --timeout 90000 --compilers js:babel-register test/backend/datastores.livy.spec.js",
"test-unit-nock": "cross-env NODE_ENV=test BABEL_DISABLE_CACHE=1 electron-mocha --full-trace --timeout 90000 --compilers js:babel-register test/backend/datastores.dataworld.spec.js test/backend/datastores.elasticsearch*.spec.js",
"test-unit-oauth2": "cross-env NODE_ENV=test BABEL_DISABLE_CACHE=1 electron-mocha --full-trace --timeout 90000 --compilers js:babel-register test/backend/routes.oauth2.spec.js",
"test-unit-plotly": "cross-env NODE_ENV=test BABEL_DISABLE_CACHE=1 electron-mocha --full-trace --timeout 90000 --compilers js:babel-register test/backend/PlotlyAPI.spec.js",
"test-unit-scheduler": "cross-env NODE_ENV=test BABEL_DISABLE_CACHE=1 electron-mocha --full-trace --timeout 90000 --compilers js:babel-register test/backend/QueryScheduler.spec.js",
@@ -215,10 +215,12 @@
"yamljs": "^0.3.0"
},
"dependencies": {
"alasql": "^0.4.5",
"csv-parse": "^2.0.0",
"font-awesome": "^4.6.1",
"ibm_db": "git+https://[email protected]/n-riesco/node-ibm_db.git#patched-v2.2.1",
"mysql": "^2.15.0",
"papaparse": "^4.3.7",
"pg": "^4.5.5",
"pg-hstore": "^2.3.2",
"restify": "^4.3.2",
91 changes: 91 additions & 0 deletions test/backend/datastores.csv.spec.js
@@ -0,0 +1,91 @@
// do not use import, otherwise other test units won't be able to reactivate nock
const nock = require('nock');

import {assert} from 'chai';

import {
connect,
query,
schemas,
tables
} from '../../backend/persistent/datastores/Datastores.js';

const csvFile = [
'col1,col 2,"col 3",col 4',
'1,1.1,2018-01-10,UK',
'2,2.2,2019-02-20,ES',
'3,3.3,2020-03-30,PL',
'' // to test csv files with empty lines can be parsed
].join('\n');

const expected = {
columnnames: ['col1', 'col 2', 'col 3', 'col 4'],
rows: [
[1, 1.1, '2018-01-10', 'UK'],
[2, 2.2, '2019-02-20', 'ES'],
[3, 3.3, '2020-03-30', 'PL']
],
schemas: [
['?', 'col1', 'Number'],
['?', 'col 2', 'Number'],
['?', 'col 3', 'String'],
['?', 'col 4', 'String']
]
};

const host = 'https://csv.example.com';
const path = '/table.csv';
const url = host + path;
const connection = {
dialect: 'csv',
database: url
};

describe('CSV:', function () {
before(function() {
// Enable nock if it has been disabled by other specs
if (!nock.isActive()) nock.activate();
});

after(function() {
nock.restore();
});

it('connect succeeds', function() {
// mock connect response
nock(host)
.get(path)
.reply(200, csvFile);

return connect(connection)
.then(conn => {
assert.equal(conn.dialect, 'csv', 'Unexpected connection.dialect');
assert.equal(conn.database, url, 'Unexpected connection.database');
assert(conn.meta, 'Missing connection.meta');
assert.deepEqual(conn.meta.fields, expected.columnnames, 'Unexpected connection.meta.fields');
});
});

it('tables succeeds', function() {
return tables(connection)
.then(obtained => {
assert.deepEqual(obtained, ['?'], 'Unexpected list of tables');
});
});

it('schemas succeeds', function() {
return schemas(connection)
.then(({columnnames, rows}) => {
assert.equal(columnnames.length, 3, 'Unexpected columnnames');
assert.deepEqual(rows, expected.schemas, 'Unexpected rows');
});
});

it('query succeeds', function() {
return query('SELECT * FROM ?', connection)
.then(({columnnames, rows}) => {
assert.deepEqual(columnnames, expected.columnnames, 'Unexpected columnnames');
assert.deepEqual(rows, expected.rows, 'Unexpected rows');
});
});
});
1 change: 1 addition & 0 deletions webpack.config.base.js
@@ -2,6 +2,7 @@ import path from 'path';

export default {
module: {
noParse: [/alasql/],
rules: [{
test: /\.jsx?$/,
use: [{