Epic: integrate with ibis-server (#403)
* Chore: integrate with ibis server (#306)
  * feat: Add ibis adaptor for querying supported data sources
  * feat: Add testIbis resolver for querying supported data sources
  * feat: use queryService for preview and describe statement
  * feat: Refactor ibisAdaptor to use DataSourceName enum consistently
  * test ibis using tpch query
  * rm test ibis
  * feat: Add previewSql resolver for SQL preview
  * rm debug log
  * use projectId and manifest to generate hash; implement getMDL api
  * pass mdl to ibis server to preview
  * transform manifest data type to json object when retrieved from database
  * better error message from Ibis server; rewrite host when developing with docker
  * use base64 str to transfer mdl
  * update README
  * change env name to fit naming style; update docker-compose.x.yaml and .env.example file
  * fix yarn build issue
  * update docker-compose file: fix incorrect ibis server endpoint
  * port forward to ibis server
  * fix incorrect migration rollback function
  * update engine version
* Chore: refactor database schema to reflect how we use them (#341)
  * add connectionInfo column and do data migration
  * drop deprecated columns that store connection info; use connectionInfo column instead
  * do data migration to store tableReference in model's properties
  * fix incorrect migration rollback function
  * use tableReference in mdl instead of refSql
  * chore: Update variable names in ibisAdaptor and queryService
  * chore: Update variable names in ibisAdaptor and queryService
  * fix lint
  * rename variable in ibis adaptor
  * remove duplicate logic to parse string; add comment
* release 0.4.0-rc.1 (#378)
* fix: fix nested resolver to format sql (#379)
* feat(wren-ui): Provide limit property in preview data API for excel add-in use (#373)
  * fix(wren-ui): add displayName to all view APIs which share the ViewInfo type in GraphQL
  * feat(wren-ui): provide limit property of preview API for usage in excel add-in
* Chore: minimized the diversity of data sources from wren-ui (#384)
  * chore: Update ibisAdaptor to include methods for getting tables and constraints
  * remove strategies and connectors from codebase; use ibis server and wrenEngine to fetch metadata of data sources
  * add validation rule api in ibisAdaptor
  * replace invalid char when generating referenceName
  * fix comments
* Feature: support dry run in preview API (#402)
  * can dry-run with previewSql
  * update env.example and docker-compose-dev.yaml

Co-authored-by: Shimin <[email protected]>
1 parent cbb0710, commit f4e4551. Showing 59 changed files with 2,292 additions and 2,720 deletions.
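Several items above ("pass mdl to ibis server to preview", "use base64 str to transfer mdl", "can dry-run with previewSql") describe the same flow: serialize the MDL manifest, base64-encode it, and send it together with the SQL to ibis-server. Below is a minimal sketch of that call, assuming Node 18+ for the global fetch; the endpoint path, the dryRun query flag, and the payload field names are illustrative assumptions, not the exact ibis-server API.

// Sketch only: the URL, the dryRun flag, and the request body fields are assumptions
// for illustration; they are not taken from the actual ibis-server API.
async function previewViaIbis({ ibisEndpoint, dataSource, mdl, sql, connectionInfo, dryRun = false }) {
  // "use base64 str to transfer mdl": serialize the manifest and encode it
  const manifestStr = Buffer.from(JSON.stringify(mdl)).toString('base64');
  const url = `${ibisEndpoint}/connector/${dataSource}/query${dryRun ? '?dryRun=true' : ''}`;
  const res = await fetch(url, {
    method: 'POST',
    headers: { 'Content-Type': 'application/json' },
    body: JSON.stringify({ sql, manifestStr, connectionInfo }),
  });
  if (!res.ok) {
    // "better error message from Ibis server": surface the response body to the caller
    throw new Error(`ibis-server returned ${res.status}: ${await res.text()}`);
  }
  // a dry run only validates the SQL, so there may be no result rows to parse
  return dryRun ? undefined : res.json();
}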
@@ -0,0 +1,24 @@
/**
 * @param { import("knex").Knex } knex
 * @returns { Promise<void> }
 */

// create connectionInfo column in project table
exports.up = function (knex) {
  return knex.schema.table('project', (table) => {
    table
      .jsonb('connection_info')
      .nullable()
      .comment('Connection information for the project');
  });
};

/**
 * @param { import("knex").Knex } knex
 * @returns { Promise<void> }
 */
exports.down = function (knex) {
  return knex.schema.table('project', (table) => {
    table.dropColumn('connection_info');
  });
};
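The four migration files in this commit are ordinary knex migrations. As a usage sketch, they can be applied or rolled back programmatically through the knex migration API; the connection settings and directory below are placeholders, not wren-ui's actual configuration.

// Placeholder knex instance: client/connection settings are illustrative only.
const knex = require('knex')({
  client: 'sqlite3',
  connection: { filename: './db.sqlite3' },
  useNullAsDefault: true,
});

async function runMigrations() {
  // apply every pending migration in the directory, including the files in this commit
  await knex.migrate.latest({ directory: './wren-ui/migrations' });
  // to undo the most recent batch instead:
  // await knex.migrate.rollback({ directory: './wren-ui/migrations' });
  await knex.destroy();
}

runMigrations().catch(console.error);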
wren-ui/migrations/20240530062809_transfer_project_table_data.js (82 additions, 0 deletions)
@@ -0,0 +1,82 @@
/**
 * @param { import("knex").Knex } knex
 * @returns { Promise<void> }
 */
exports.up = async function (knex) {
  const projects = await knex('project').select('*');

  // bigquery data
  const bigqueryConnectionInfo = projects
    .filter((project) => project.type === 'BIG_QUERY')
    .map((project) => {
      return {
        id: project.id,
        connectionInfo: {
          projectId: project.project_id,
          datasetId: project.dataset_id,
          credentials: project.credentials,
        },
      };
    });

  // duckdb data
  const duckdbConnectionInfo = projects
    .filter((project) => project.type === 'DUCKDB')
    .map((project) => {
      return {
        id: project.id,
        connectionInfo: {
          initSql: project.init_sql || '',
          configurations: project.configurations || {},
          extensions: project.extensions || [],
        },
      };
    });

  // postgres data
  const postgresConnectionInfo = projects
    .filter((project) => project.type === 'POSTGRES')
    .map((project) => {
      const ssl =
        project.configurations && project.configurations.ssl ? true : false;
      return {
        id: project.id,
        connectionInfo: {
          host: project.host,
          port: project.port,
          database: project.database,
          user: project.user,
          password: project.credentials,
          ssl,
        },
      };
    });

  // update project table
  for (const project of [
    ...bigqueryConnectionInfo,
    ...duckdbConnectionInfo,
    ...postgresConnectionInfo,
  ]) {
    const { id, connectionInfo } = project;
    if (process.env.DB_TYPE === 'pg') {
      // postgres
      await knex('project')
        .where({ id })
        .update({ connection_info: connectionInfo });
    } else {
      // sqlite
      await knex('project')
        .where({ id })
        .update({ connection_info: JSON.stringify(connectionInfo) });
    }
  }
};

/**
 * @param { import("knex").Knex } knex
 * @returns { Promise<void> }
 */
exports.down = async function (knex) {
  await knex('project').update({ connection_info: null });
};
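Note the pg/sqlite split at the end of up(): Postgres stores connection_info as real jsonb, while SQLite stores the stringified JSON. A small helper like the one below (a sketch, not actual wren-ui code) shows how the value can be read back uniformly; the later 20240531085916 migration relies on the same typeof check.

// Sketch: read connection_info back regardless of backend.
// With DB_TYPE=pg the driver returns an object; with SQLite it returns the
// JSON string written above, so parse conditionally.
async function getConnectionInfo(knex, projectId) {
  const project = await knex('project').where({ id: projectId }).first();
  if (!project || project.connection_info == null) return null;
  return typeof project.connection_info === 'string'
    ? JSON.parse(project.connection_info)
    : project.connection_info;
}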
wren-ui/migrations/20240530105955_drop_project_table_columns.js (65 additions, 0 deletions)
@@ -0,0 +1,65 @@
/**
 * @param { import("knex").Knex } knex
 * @returns { Promise<void> }
 */
exports.up = function (knex) {
  return knex.schema.table('project', (table) => {
    table.dropColumn('configurations');
    table.dropColumn('credentials');
    table.dropColumn('project_id');
    table.dropColumn('dataset_id');
    table.dropColumn('init_sql');
    table.dropColumn('extensions');
    table.dropColumn('host');
    table.dropColumn('port');
    table.dropColumn('database');
    table.dropColumn('user');
  });
};

/**
 * @param { import("knex").Knex } knex
 * @returns { Promise<void> }
 */
exports.down = function (knex) {
  return knex.schema.table('project', (table) => {
    table
      .jsonb('configurations')
      .nullable()
      .comment(
        'duckdb configurations that can be set in session, eg: { "key1": "value1", "key2": "value2" }',
      );
    table
      .text('credentials')
      .nullable()
      .comment('database connection credentials');
    table
      .string('project_id')
      .nullable()
      .comment('gcp project id, big query specific');
    table.string('dataset_id').nullable().comment('big query datasetId');
    table.text('init_sql');
    table
      .jsonb('extensions')
      .nullable()
      .comment(
        'duckdb extensions, will be a array-like string like, eg: ["extension1", "extension2"]',
      );
    table
      .string('host')
      .nullable()
      .comment('postgresql host, postgresql specific');
    table
      .integer('port')
      .nullable()
      .comment('postgresql port, postgresql specific');
    table
      .string('database')
      .nullable()
      .comment('postgresql database, postgresql specific');
    table
      .string('user')
      .nullable()
      .comment('postgresql user, postgresql specific');
  });
};
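One design note on up() above: dropColumn errors if a column is already missing, so a partially applied schema would make the migration fail. If that mattered, a defensive variant could guard each drop with knex.schema.hasColumn, as in the sketch below (an illustration only, not the shipped migration).

// Hypothetical defensive variant; the shipped migration simply drops the columns.
async function dropProjectColumnsIfPresent(knex) {
  const columns = [
    'configurations', 'credentials', 'project_id', 'dataset_id', 'init_sql',
    'extensions', 'host', 'port', 'database', 'user',
  ];
  for (const column of columns) {
    // hasColumn resolves to false when the column was already removed
    if (await knex.schema.hasColumn('project', column)) {
      await knex.schema.table('project', (table) => table.dropColumn(column));
    }
  }
}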
wren-ui/migrations/20240531085916_transfer_model_properties.js (65 additions, 0 deletions)
@@ -0,0 +1,65 @@
/**
 * @param { import("knex").Knex } knex
 * @returns { Promise<void> }
 */
exports.up = async function (knex) {
  const projects = await knex('project').select('*');
  const models = await knex('model').select('*');
  console.log(`model len:${models.length}`);
  for (const model of models) {
    const project = projects.find((p) => p.id === model.project_id);
    const dataSourceType = project.type;
    // get schema & catalog if its available
    let schema = null;
    let catalog = null;
    let table = null;
    switch (dataSourceType) {
      case 'BIG_QUERY': {
        const connectionInfo =
          typeof project.connection_info === 'string'
            ? JSON.parse(project.connection_info)
            : project.connection_info;
        const datasetId = connectionInfo.datasetId;
        if (!datasetId) continue;
        const splitDataSetId = datasetId.split('.');
        schema = splitDataSetId[1];
        catalog = splitDataSetId[0];
        table = model.source_table_name;
        break;
      }
      case 'POSTGRES': {
        const connectionInfo =
          typeof project.connection_info === 'string'
            ? JSON.parse(project.connection_info)
            : project.connection_info;
        catalog = connectionInfo.database;
        schema = model.source_table_name.split('.')[0];
        table = model.source_table_name.split('.')[1];
        break;
      }
      case 'DUCKDB': {
        // already have schema & catalog in properties
        table = model.source_table_name;
        break;
      }
    }
    const oldProperties = model.properties ? JSON.parse(model.properties) : {};
    const newProperties = {
      schema,
      catalog,
      table,
      ...oldProperties,
    };
    await knex('model')
      .where({ id: model.id })
      .update({ properties: JSON.stringify(newProperties) });
  }
};

/**
 * @param { import("knex").Knex } knex
 * @returns { Promise<void> }
 */
exports.down = function () {
  return Promise.resolve();
};
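The schema/catalog/table values written into model properties here are what the commit message calls storing a tableReference, so the MDL can point at the physical table directly instead of relying on refSql. A rough sketch of how those properties might be turned into a tableReference when building the manifest is below; the exact MDL field names are an assumption, not taken from the wren-engine spec.

// Sketch: derive a tableReference-like object from the migrated model properties.
// The output shape mirrors the values written by the migration above and is
// illustrative only.
function toTableReference(model) {
  const properties =
    typeof model.properties === 'string'
      ? JSON.parse(model.properties)
      : model.properties || {};
  return {
    catalog: properties.catalog || null,
    schema: properties.schema || null,
    table: properties.table || model.source_table_name,
  };
}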