Commit

Merge pull request #60 from outerbase/invisal/snowflake-support
snowflake support
invisal authored Nov 14, 2024
2 parents 6d03a81 + c01458f commit 40ce6e8
Showing 7 changed files with 4,489 additions and 1,086 deletions.
21 changes: 21 additions & 0 deletions .github/workflows/test.yml
@@ -208,3 +208,24 @@ jobs:
MOTHERDUCK_PATH: ${{ secrets.MOTHERDUCK_PATH }}
MOTHERDUCK_TOKEN: ${{ secrets.MOTHERDUCK_TOKEN }}
run: npm run test:connection

test_snowflake:
name: 'Snowflake Connection'
runs-on: ubuntu-latest
needs: build

steps:
- uses: actions/checkout@v4

- name: Install modules
run: npm install

- name: Run tests
env:
CONNECTION_TYPE: snowflake
SNOWFLAKE_ACCOUNT_ID: ${{ secrets.SNOWFLAKE_ACCOUNT_ID }}
SNOWFLAKE_USERNAME: ${{ secrets.SNOWFLAKE_USERNAME }}
SNOWFLAKE_PASSWORD: ${{ secrets.SNOWFLAKE_PASSWORD }}
SNOWFLAKE_WAREHOUSE: ${{ secrets.SNOWFLAKE_WAREHOUSE }}
SNOWFLAKE_DATABASE: ${{ secrets.SNOWFLAKE_DATABASE }}
run: npm run test:connection
5,311 changes: 4,233 additions & 1,078 deletions package-lock.json

Large diffs are not rendered by default.

11 changes: 6 additions & 5 deletions package.json
@@ -1,6 +1,6 @@
{
"name": "@outerbase/sdk",
"version": "2.0.0-rc.1",
"version": "2.0.0-rc.2",
"description": "",
"main": "dist/index.js",
"module": "dist/index.js",
@@ -37,26 +37,27 @@
"handlebars": "^4.7.8"
},
"devDependencies": {
"@google-cloud/bigquery": "^7.9.0",
"@jest/globals": "^29.7.0",
"@libsql/client": "^0.14.0",
"@neondatabase/serverless": "^0.9.3",
"@types/jest": "^29.5.13",
"@types/node": "^20.12.12",
"@types/ws": "^8.5.10",
"dotenv": "^16.4.5",
"duckdb": "^1.1.1",
"husky": "^9.0.11",
"jest": "^29.7.0",
"lint-staged": "^15.2.4",
"mongodb": "^6.9.0",
"mysql2": "^3.11.3",
"pg": "^8.13.0",
"prettier": "^3.2.5",
"ts-jest": "^29.1.3",
"ts-node": "^10.9.2",
"tsconfig-paths": "^4.2.0",
"typescript": "^5.4.5",
"@google-cloud/bigquery": "^7.9.0",
"@neondatabase/serverless": "^0.9.3",
"duckdb": "^1.1.1",
"ws": "^8.17.1",
"mongodb": "^6.9.0"
"snowflake-sdk": "^1.15.0"
}
}
2 changes: 1 addition & 1 deletion src/connections/mysql.ts
@@ -49,7 +49,7 @@ interface MySQLConstraintResult {
CONSTRAINT_TYPE: string;
}

interface MySQLConstraintColumnResult {
export interface MySQLConstraintColumnResult {
TABLE_SCHEMA: string;
TABLE_NAME: string;
COLUMN_NAME: string;
213 changes: 213 additions & 0 deletions src/connections/snowflake/snowflake.ts
@@ -0,0 +1,213 @@
import snowflake from 'snowflake-sdk';
import { Query } from '../../query';
import { QueryResult } from '..';
import {
createErrorResult,
transformArrayBasedResult,
} from '../../utils/transformer';
import { Database, TableColumn } from '../../models/database';
import { PostgreBaseConnection } from './../postgre/base';
import {
buildMySQLDatabaseSchmea,
MySQLConstraintColumnResult,
} from '../mysql';

export class SnowflakeConnection extends PostgreBaseConnection {
protected db: snowflake.Connection;

constructor(db: any) {
super();
this.db = db;
}

async connect(): Promise<any> {
await new Promise((resolve, reject) => {
this.db.connectAsync((err, conn) => {
if (err) reject(err.message);
else resolve(conn);
});
});
}

async disconnect(): Promise<any> {
await new Promise((resolve) => this.db.destroy(resolve));
}

async testConnection(): Promise<{ error?: string }> {
try {
await this.connect();
const { data } = await this.query({
query: 'SELECT CURRENT_DATABASE() AS DBNAME;',
});

await this.disconnect();
if (!data[0].DBNAME) return { error: 'Database does not exist' };

return {};
} catch (e) {
if (e instanceof Error) return { error: e.message };
return { error: 'Unknown error' };
}
}

async fetchDatabaseSchema(): Promise<Database> {
// Get the list of schemas first
const { data: schemaList } = await this.query<{ SCHEMA_NAME: string }>({
query: `SELECT SCHEMA_NAME FROM information_schema.schemata WHERE schema_name NOT IN ('INFORMATION_SCHEMA');`,
});

// Get the list of all tables
const { data: tableList } = await this.query<{
TABLE_NAME: string;
TABLE_SCHEMA: string;
}>({
query: `SELECT TABLE_NAME, TABLE_SCHEMA FROM information_schema.tables WHERE table_schema NOT IN ('INFORMATION_SCHEMA');`,
});

// Get the list of all columns
const { data: columnList } = await this.query<{
TABLE_SCHEMA: string;
TABLE_NAME: string;
COLUMN_NAME: string;
DATA_TYPE: string;
IS_NULLABLE: string;
COLUMN_DEFAULT: string;
ORDINAL_POSITION: number;
}>({
query: `SELECT * FROM information_schema.columns WHERE table_schema NOT IN ('INFORMATION_SCHEMA');`,
});

// Get the list of all constraints
const { data: constraintsList } = await this.query<{
CONSTRAINT_SCHEMA: string;
CONSTRAINT_NAME: string;
TABLE_NAME: string;
TABLE_SCHEMA: string;
CONSTRAINT_TYPE: string;
}>({
query: `SELECT * FROM information_schema.table_constraints WHERE CONSTRAINT_SCHEMA NOT IN ('INFORMATION_SCHEMA') AND CONSTRAINT_TYPE IN ('FOREIGN KEY', 'PRIMARY KEY', 'UNIQUE');`,
});

// Mimic the key usage table using SHOW PRIMARY KEYS and SHOW IMPORTED KEYS
const { data: primaryKeyConstraint } = await this.query<{
schema_name: string;
table_name: string;
column_name: string;
constraint_name: string;
}>({ query: `SHOW PRIMARY KEYS;` });

const { data: foreignKeyConstraint } = await this.query<{
pk_schema_name: string;
pk_table_name: string;
pk_column_name: string;
fk_schema_name: string;
fk_table_name: string;
fk_column_name: string;
fk_name: string;
}>({ query: `SHOW IMPORTED KEYS;` });

// Snowflake's information_schema layout is close enough to MySQL's that we can
// reuse the MySQL schema builder by just mapping the column names
return buildMySQLDatabaseSchmea({
schemaList,
tableList,
columnList: columnList.map((column) => ({
COLUMN_TYPE: column.DATA_TYPE,
...column,
COLUMN_KEY: '',
EXTRA: '',
})),
constraintsList,
constraintColumnsList: [
...primaryKeyConstraint.map(
(constraint): MySQLConstraintColumnResult => ({
TABLE_SCHEMA: constraint.schema_name,
TABLE_NAME: constraint.table_name,
COLUMN_NAME: constraint.column_name,
CONSTRAINT_NAME: constraint.constraint_name,
REFERENCED_TABLE_SCHEMA: '',
REFERENCED_TABLE_NAME: '',
REFERENCED_COLUMN_NAME: '',
})
),
...foreignKeyConstraint.map(
(constraint): MySQLConstraintColumnResult => ({
TABLE_SCHEMA: constraint.fk_schema_name,
TABLE_NAME: constraint.fk_table_name,
COLUMN_NAME: constraint.fk_column_name,
CONSTRAINT_NAME: constraint.fk_name,
REFERENCED_TABLE_SCHEMA: constraint.pk_schema_name,
REFERENCED_TABLE_NAME: constraint.pk_table_name,
REFERENCED_COLUMN_NAME: constraint.pk_column_name,
})
),
],
});
}

createTable(
schemaName: string | undefined,
tableName: string,
columns: TableColumn[]
): Promise<QueryResult> {
const tempColumns = structuredClone(columns);
for (const column of tempColumns) {
if (column.definition.references) {
column.definition.references.table = schemaName
? `${schemaName}.${column.definition.references.table}`
: column.definition.references.table;
}
}

return super.createTable(schemaName, tableName, tempColumns);
}

async renameTable(
schemaName: string | undefined,
tableName: string,
newTableName: string
): Promise<QueryResult> {
// Schema is required for rename
return super.renameTable(
schemaName,
tableName,
schemaName ? `${schemaName}.${newTableName}` : newTableName
);
}

async query<T = Record<string, unknown>>(
query: Query
): Promise<QueryResult<T>> {
try {
const [err, headers, rows] = await new Promise<
[snowflake.SnowflakeError | undefined, string[], unknown[][]]
>((resolve) => {
this.db.execute({
sqlText: query.query,
binds: query.parameters as snowflake.Binds,
rowMode: 'array',
complete: (err, stmt, rows) => {
resolve([
err,
err
? []
: stmt.getColumns().map((col) => col.getName()),
rows as unknown[][],
]);
},
});
});

if (err) return createErrorResult(err.message) as QueryResult<T>;
return transformArrayBasedResult(
headers,
(header) => ({
name: header,
}),
rows
) as QueryResult<T>;
} catch (e) {
return createErrorResult('Unknown error') as QueryResult<T>;
}
}
}
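
For reference, a minimal usage sketch of the new connection. The connection options mirror those used by the test helper further down; the credential values and the package import path are illustrative placeholders, not part of this diff.

import snowflake from 'snowflake-sdk';
import { SnowflakeConnection } from '@outerbase/sdk';

async function main() {
    // Wrap a snowflake-sdk connection; SnowflakeConnection drives it via connectAsync/execute.
    const client = new SnowflakeConnection(
        snowflake.createConnection({
            account: 'my-account-id', // placeholder values
            username: 'my-user',
            password: 'my-password',
            database: 'MY_DATABASE',
            warehouse: 'MY_WAREHOUSE',
        })
    );

    await client.connect();

    // Plain query; rows come back keyed by column name on the `data` field.
    const { data } = await client.query<{ DBNAME: string }>({
        query: 'SELECT CURRENT_DATABASE() AS DBNAME;',
    });
    console.log(data[0].DBNAME);

    // Introspect schemas, tables, columns and constraints via information_schema + SHOW commands.
    const schema = await client.fetchDatabaseSchema();
    console.log(Object.keys(schema));

    await client.disconnect();
}

main();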
1 change: 1 addition & 0 deletions src/index.ts
@@ -7,5 +7,6 @@ export * from './connections/mongodb';
export * from './connections/mysql';
export * from './connections/postgre/postgresql';
export * from './connections/sqlite/turso';
export * from './connections/snowflake/snowflake';
export * from './client';
export * from './models/decorators';
16 changes: 14 additions & 2 deletions tests/connections/create-test-connection.ts
@@ -1,6 +1,7 @@
import { Client as PgClient } from 'pg';
import { BigQuery } from '@google-cloud/bigquery';
import duckDB from 'duckdb';
import snowflake from 'snowflake-sdk';
import { createClient as createTursoConnection } from '@libsql/client';
import { createConnection as createMySqlConnection } from 'mysql2';
import {
@@ -13,6 +14,7 @@ import {
MongoDBConnection,
DuckDBConnection,
StarbaseConnection,
SnowflakeConnection,
} from '../../src';
import { MongoClient } from 'mongodb';

@@ -86,8 +88,8 @@ export default function createTestClient(): {
const client = new DuckDBConnection(
process.env.MOTHERDUCK_PATH
? new duckDB.Database(process.env.MOTHERDUCK_PATH, {
motherduck_token: process.env.MOTHERDUCK_TOKEN as string,
})
motherduck_token: process.env.MOTHERDUCK_TOKEN as string,
})
: new duckDB.Database(':memory:')
);
return { client, defaultSchema: 'main' };
@@ -98,6 +100,16 @@ });
});

return { client, defaultSchema: 'main' };
} else if (process.env.CONNECTION_TYPE === 'snowflake') {
const client = new SnowflakeConnection(snowflake.createConnection({
database: process.env.SNOWFLAKE_DATABASE as string,
username: process.env.SNOWFLAKE_USERNAME as string,
password: process.env.SNOWFLAKE_PASSWORD as string,
account: process.env.SNOWFLAKE_ACCOUNT_ID as string,
warehouse: process.env.SNOWFLAKE_WAREHOUSE as string,
}));

return { client, defaultSchema: 'PUBLIC' };
}

throw new Error('Invalid connection type');
