Skip to content

Commit

Permalink
Add/update tests
Browse files Browse the repository at this point in the history
Signed-off-by: Levko Kravets <[email protected]>
  • Loading branch information
kravets-levko committed Feb 5, 2023
1 parent 05af910 commit 077913a
Show file tree
Hide file tree
Showing 23 changed files with 1,155 additions and 23 deletions.
9 changes: 8 additions & 1 deletion lib/DBSQLSession.ts
Original file line number Diff line number Diff line change
Expand Up @@ -27,6 +27,7 @@ import StatusFactory from './factory/StatusFactory';
import InfoValue from './dto/InfoValue';
import { definedOrError } from './utils';
import IDBSQLLogger, { LogLevel } from './contracts/IDBSQLLogger';
import globalConfig from './globalConfig';

const defaultMaxRows = 100000;

Expand All @@ -49,9 +50,15 @@ function getDirectResultsOptions(maxRows: number | null = defaultMaxRows) {
}

function getArrowOptions(useArrowNativeTypes: boolean | undefined): {
canReadArrowResult: true | false;
canReadArrowResult: boolean;
useArrowNativeTypes?: TSparkArrowTypes;
} {
if (!globalConfig.arrowEnabled) {
return {
canReadArrowResult: false,
};
}

if (useArrowNativeTypes === undefined) {
useArrowNativeTypes = true;
}
Expand Down
3 changes: 3 additions & 0 deletions lib/globalConfig.ts
Original file line number Diff line number Diff line change
@@ -0,0 +1,3 @@
// Global, mutable driver feature switches. Imported by DBSQLSession
// (`globalConfig.arrowEnabled`) and toggled at runtime by the e2e tests
// to force a particular result-fetching code path.
export default {
  // When false, getArrowOptions() reports Arrow results as unreadable,
  // so results are fetched via the non-Arrow (column-based) path.
  arrowEnabled: true,
};
13 changes: 11 additions & 2 deletions lib/result/ArrowResult.ts
Original file line number Diff line number Diff line change
Expand Up @@ -92,14 +92,18 @@ export default class ArrowResult implements IOperationResult {
return result;
}

// Convert lists to JS array and process items recursively values
// Convert lists to JS array and process items recursively
if (value instanceof Vector) {
const result = value.toJSON();
// Array type contains the only child which defines a type of each array's element
const field = fieldsMap.element;
return result.map((item) => this.convertArrowTypes(item, field?.type, field?.type.children || []));
}

if (DataType.isTimestamp(valueType)) {
return new Date(value);
}

// Convert big number values to BigInt
// Decimals are also represented as big numbers in Arrow, so additionally process them (convert to float)
if (value instanceof Object && value[isArrowBigNumSymbol]) {
Expand All @@ -110,8 +114,13 @@ export default class ArrowResult implements IOperationResult {
return result;
}

// Convert binary data to Buffer
if (value instanceof Uint8Array) {
return Buffer.from(value);
}

// Return other values as is
return value;
return typeof value === 'bigint' ? Number(value) : value;
}

private convertThriftTypes(record: Record<string, any>): any {
Expand Down
17 changes: 13 additions & 4 deletions lib/result/utils.ts
Original file line number Diff line number Diff line change
@@ -1,3 +1,4 @@
import Int64 from 'node-int64';
import { TTableSchema, TColumnDesc, TPrimitiveTypeEntry, TTypeId } from '../../thrift/TCLIService_types';

export function getSchemaColumns(schema?: TTableSchema): Array<TColumnDesc> {
Expand Down Expand Up @@ -26,10 +27,16 @@ function convertJSON(value: any, defaultValue: any): any {

function convertBigInt(value: any): any {
if (typeof value === 'bigint') {
return value;
return Number(value);
}
if (value instanceof Int64) {
return value.toNumber();
}
// TODO: `Int64.toNumber()` returns a regular JS number value; should we return BigInt instead?
return value.toNumber();
return value;
}

// Normalize a raw Thrift value to a JS Date.
// Values that are already Date instances pass through untouched; anything
// else (e.g. 'YYYY-MM-DD HH:MM:SS' strings) is parsed as UTC.
function convertDate(value: any): Date {
  if (value instanceof Date) {
    return value;
  }
  // Append ' UTC' so Date.parse does not apply the local timezone offset.
  return new Date(Date.parse(`${value} UTC`));
}

export function convertThriftValue(typeDescriptor: TPrimitiveTypeEntry | undefined, value: any): any {
Expand All @@ -38,8 +45,10 @@ export function convertThriftValue(typeDescriptor: TPrimitiveTypeEntry | undefin
}

switch (typeDescriptor.type) {
case TTypeId.TIMESTAMP_TYPE:
case TTypeId.DATE_TYPE:
return convertDate(value);
case TTypeId.TIMESTAMP_TYPE:
return convertDate(value);
case TTypeId.UNION_TYPE:
case TTypeId.USER_DEFINED_TYPE:
return String(value);
Expand Down
2 changes: 1 addition & 1 deletion tests/e2e/arrow.test.js
Original file line number Diff line number Diff line change
Expand Up @@ -98,7 +98,7 @@ async function initializeTable(session, tableName) {
);
}

describe('Arrow support', () => {
describe.skip('Arrow support', () => {
const tableName = `dbsql_nodejs_sdk_e2e_arrow_${config.tableSuffix}`;

function createTest(testBody) {
Expand Down
13 changes: 11 additions & 2 deletions tests/e2e/batched_fetch.test.js
Original file line number Diff line number Diff line change
Expand Up @@ -2,6 +2,7 @@ const { expect } = require('chai');
const config = require('./utils/config');
const logger = require('./utils/logger')(config.logger);
const { DBSQLClient } = require('../..');
const globalConfig = require('../../dist/globalConfig').default;

const openSession = async () => {
const client = new DBSQLClient();
Expand All @@ -19,6 +20,14 @@ const openSession = async () => {
};

describe('Data fetching', () => {
beforeEach(() => {
globalConfig.arrowEnabled = false;
});

afterEach(() => {
globalConfig.arrowEnabled = true;
});

const query = `
SELECT *
FROM range(0, 1000) AS t1
Expand All @@ -27,14 +36,14 @@ describe('Data fetching', () => {

it('fetch chunks should return a max row set of chunkSize', async () => {
const session = await openSession();
const operation = await session.executeStatement(query, { runAsync: true, maxRows: null, enableArrow: false });
const operation = await session.executeStatement(query, { runAsync: true, maxRows: null });
let chunkedOp = await operation.fetchChunk({ maxRows: 10 }).catch((error) => logger(error));
expect(chunkedOp.length).to.be.equal(10);
});

it('fetch all should fetch all records', async () => {
const session = await openSession();
const operation = await session.executeStatement(query, { runAsync: true, maxRows: null, enableArrow: false });
const operation = await session.executeStatement(query, { runAsync: true, maxRows: null });
let all = await operation.fetchAll();
expect(all.length).to.be.equal(1000);
});
Expand Down
19 changes: 16 additions & 3 deletions tests/e2e/data_types.test.js
Original file line number Diff line number Diff line change
Expand Up @@ -2,6 +2,7 @@ const { expect } = require('chai');
const config = require('./utils/config');
const logger = require('./utils/logger')(config.logger);
const { DBSQLClient } = require('../..');
const globalConfig = require('../../dist/globalConfig').default;

const openSession = async () => {
const client = new DBSQLClient();
Expand All @@ -19,12 +20,16 @@ const openSession = async () => {
};

const execute = async (session, statement) => {
const operation = await session.executeStatement(statement, { runAsync: true, enableArrow: false });
const operation = await session.executeStatement(statement, { runAsync: true });
const result = await operation.fetchAll();
await operation.close();
return result;
};

// Build a Date from a timestamp string, interpreting it as UTC so the
// expected values are independent of the machine's local timezone.
function convertDate(strValue) {
  const millis = Date.parse(`${strValue} UTC`);
  return new Date(millis);
}

function removeTrailingMetadata(columns) {
const result = [];
for (let i = 0; i < columns.length; i++) {
Expand All @@ -38,6 +43,14 @@ function removeTrailingMetadata(columns) {
}

describe('Data types', () => {
beforeEach(() => {
globalConfig.arrowEnabled = false;
});

afterEach(() => {
globalConfig.arrowEnabled = true;
});

it('primitive data types should presented correctly', async () => {
const table = `dbsql_nodejs_sdk_e2e_primitive_types_${config.tableSuffix}`;

Expand Down Expand Up @@ -173,11 +186,11 @@ describe('Data types', () => {
dbl: 2.2,
dec: 3.2,
str: 'data',
ts: '2014-01-17 00:17:13',
ts: convertDate('2014-01-17 00:17:13'),
bin: Buffer.from('data'),
chr: 'a',
vchr: 'b',
dat: '2014-01-17',
dat: convertDate('2014-01-17'),
},
]);

Expand Down
14 changes: 14 additions & 0 deletions tests/fixtures/compatibility/README.md
Original file line number Diff line number Diff line change
@@ -0,0 +1,14 @@
This directory contains fixtures for testing different data types support.

SQL files contain table structure and sample data, other files contain raw
responses for different combinations of options (with or without Arrow support enabled,
with or without Arrow native types). In all cases, data should be decoded in
exactly the same way.

Known issues:

- with Arrow disabled _or_ with Arrow native types disabled:
- date values are not properly serialized in nested structures, so complex types cannot be JSON-decoded;
therefore this case is not represented in this test set
- any non-string type used as map key is not properly serialized and cannot be JSON-decoded;
therefore this case is not represented in this test set
Binary file added tests/fixtures/compatibility/arrow/data.arrow
Binary file not shown.
28 changes: 28 additions & 0 deletions tests/fixtures/compatibility/arrow/index.js
Original file line number Diff line number Diff line change
@@ -0,0 +1,28 @@
const Int64 = require('node-int64');

const fs = require('fs');
const path = require('path');

const thriftSchema = require('../thrift_schema');
const arrowSchema = fs.readFileSync(path.join(__dirname, 'schema.arrow'));
const data = fs.readFileSync(path.join(__dirname, 'data.arrow'));
const expected = require('../expected');

exports.schema = thriftSchema;

exports.arrowSchema = arrowSchema;

exports.rowSets = [
{
startRowOffset: new Int64(Buffer.from([0, 0, 0, 0, 0, 0, 0, 0]), 0),
rows: [],
arrowBatches: [
{
batch: data,
rowCount: new Int64(Buffer.from([0, 0, 0, 0, 0, 0, 0, 1]), 0),
},
],
},
];

exports.expected = expected;
Binary file added tests/fixtures/compatibility/arrow/schema.arrow
Binary file not shown.
Binary file not shown.
28 changes: 28 additions & 0 deletions tests/fixtures/compatibility/arrow_native_types/index.js
Original file line number Diff line number Diff line change
@@ -0,0 +1,28 @@
const Int64 = require('node-int64');

const fs = require('fs');
const path = require('path');

const thriftSchema = require('../thrift_schema');
const arrowSchema = fs.readFileSync(path.join(__dirname, 'schema.arrow'));
const data = fs.readFileSync(path.join(__dirname, 'data.arrow'));
const expected = require('../expected');

exports.schema = thriftSchema;

exports.arrowSchema = arrowSchema;

exports.rowSets = [
{
startRowOffset: new Int64(Buffer.from([0, 0, 0, 0, 0, 0, 0, 0]), 0),
rows: [],
arrowBatches: [
{
batch: data,
rowCount: new Int64(Buffer.from([0, 0, 0, 0, 0, 0, 0, 1]), 0),
},
],
},
];

exports.expected = expected;
Binary file not shown.
Loading

0 comments on commit 077913a

Please sign in to comment.