From 38c149b14733217dc423dade968c02103448774f Mon Sep 17 00:00:00 2001 From: BenBirt Date: Fri, 9 Aug 2019 15:35:25 +0200 Subject: [PATCH] Azure SQL Data Warehouse support. (#351) * initial commit, added sqldatawarehouse support * code review fixes: consistency, use const, async. * some cleanup * some more cleanup * slightly more cleanup * integration tests * lewis comments * version bump * creds --- api/BUILD | 1 + api/api.package.json | 1 + api/commands/credentials.ts | 10 +- api/commands/state.ts | 2 +- api/dbadapters/bigquery.ts | 8 +- api/dbadapters/index.ts | 2 + api/dbadapters/sqldatawarehouse.ts | 104 ++++++++++++++++ assertion_utils/index.ts | 9 +- cli/credentials.ts | 16 +++ cli/index.ts | 14 +-- core/adapters/base.ts | 5 +- core/adapters/index.ts | 29 +++-- core/adapters/sqldatawarehouse.ts | 111 +++++++++++++++++ core/session.ts | 8 ++ core/table.ts | 10 ++ core/utils.ts | 16 +++ crossdb/index.ts | 3 +- package.json | 2 + protos/core.proto | 5 + protos/profiles.proto | 8 ++ scripts/decode_credentials | 22 ++-- test_credentials/.gitignore | 5 +- test_credentials/sqldatawarehouse.json.enc | Bin 0 -> 271 bytes tests/api/api.spec.ts | 4 +- tests/integration/BUILD | 29 +++++ tests/integration/sqldatawarehouse.spec.ts | 101 +++++++++++++++ .../sqldatawarehouse_project/BUILD | 20 +++ .../sqldatawarehouse_project/dataform.json | 5 + .../definitions/example_assertion_fail.sqlx | 3 + .../definitions/example_assertion_pass.sqlx | 5 + .../example_assertion_uniqueness_fail.sqlx | 5 + .../example_assertion_uniqueness_pass.sqlx | 5 + .../definitions/example_incremental.sqlx | 13 ++ .../definitions/example_table.sqlx | 3 + .../definitions/example_view.sqlx | 3 + .../definitions/sample_data.sqlx | 5 + .../definitions/sample_data_2.sqlx | 5 + .../definitions/test.js | 88 ++++++++++++++ version.bzl | 2 +- yarn.lock | 115 ++++++++++++++++-- 40 files changed, 741 insertions(+), 61 deletions(-) create mode 100644 api/dbadapters/sqldatawarehouse.ts create mode 100644 core/adapters/sqldatawarehouse.ts create mode 100644 test_credentials/sqldatawarehouse.json.enc create mode 100644 tests/integration/sqldatawarehouse.spec.ts create mode 100644 tests/integration/sqldatawarehouse_project/BUILD create mode 100644 tests/integration/sqldatawarehouse_project/dataform.json create mode 100644 tests/integration/sqldatawarehouse_project/definitions/example_assertion_fail.sqlx create mode 100644 tests/integration/sqldatawarehouse_project/definitions/example_assertion_pass.sqlx create mode 100644 tests/integration/sqldatawarehouse_project/definitions/example_assertion_uniqueness_fail.sqlx create mode 100644 tests/integration/sqldatawarehouse_project/definitions/example_assertion_uniqueness_pass.sqlx create mode 100644 tests/integration/sqldatawarehouse_project/definitions/example_incremental.sqlx create mode 100644 tests/integration/sqldatawarehouse_project/definitions/example_table.sqlx create mode 100644 tests/integration/sqldatawarehouse_project/definitions/example_view.sqlx create mode 100644 tests/integration/sqldatawarehouse_project/definitions/sample_data.sqlx create mode 100644 tests/integration/sqldatawarehouse_project/definitions/sample_data_2.sqlx create mode 100644 tests/integration/sqldatawarehouse_project/definitions/test.js diff --git a/api/BUILD b/api/BUILD index feaa0b850..c49633940 100644 --- a/api/BUILD +++ b/api/BUILD @@ -16,6 +16,7 @@ ts_library( "@npm//cron-parser", "@npm//glob", "@npm//node-redshift", + "@npm//mssql", "@npm//pretty-ms", "@npm//promise-pool-executor", "@npm//protobufjs", diff 
--git a/api/api.package.json b/api/api.package.json index 80603a3b1..e28d376c1 100644 --- a/api/api.package.json +++ b/api/api.package.json @@ -12,6 +12,7 @@ "cron-parser": "^2.11.0", "glob": "^7.1.3", "node-redshift": "^0.1.5", + "mssql": "^5.1.0", "pretty-ms": "^4.0.0", "promise-pool-executor": "^1.1.1", "snowflake-sdk": "^1.1.5", diff --git a/api/commands/credentials.ts b/api/commands/credentials.ts index a13be5b90..c8f8af98a 100644 --- a/api/commands/credentials.ts +++ b/api/commands/credentials.ts @@ -5,7 +5,7 @@ import * as fs from "fs"; export const CREDENTIALS_FILENAME = ".df-credentials.json"; -export type Credentials = dataform.IBigQuery | dataform.IJDBC | dataform.ISnowflake; +export type Credentials = dataform.IBigQuery | dataform.IJDBC | dataform.ISnowflake | dataform.ISQLDataWarehouse; export function read(warehouse: string, credentialsPath: string): Credentials { if (!fs.existsSync(credentialsPath)) { @@ -41,6 +41,14 @@ export function coerce(warehouse: string, credentials: any): Credentials { requiredWarehouseProps[warehouse] ); } + case WarehouseType.SQLDATAWAREHOUSE: { + return validateAnyAsCredentials( + credentials, + dataform.SQLDataWarehouse.verify, + dataform.SQLDataWarehouse.create, + requiredWarehouseProps[warehouse] + ); + } default: throw new Error(`Unrecognized warehouse: ${warehouse}`); } diff --git a/api/commands/state.ts b/api/commands/state.ts index 413c7135c..2fde4c90c 100644 --- a/api/commands/state.ts +++ b/api/commands/state.ts @@ -1,5 +1,5 @@ +import { IDbAdapter } from "@dataform/api/dbadapters"; import { dataform } from "@dataform/protos"; -import { IDbAdapter } from "../dbadapters"; export function state( compiledGraph: dataform.ICompiledGraph, diff --git a/api/dbadapters/bigquery.ts b/api/dbadapters/bigquery.ts index 68c8a9fcb..1f396953d 100644 --- a/api/dbadapters/bigquery.ts +++ b/api/dbadapters/bigquery.ts @@ -154,9 +154,7 @@ export class BigQueryDbAdapter implements IDbAdapter { return rowsResult[0]; } return this.execute( - `SELECT * FROM \`${metadata.tableReference.projectId}.${metadata.tableReference.datasetId}.${ - metadata.tableReference.tableId - }\` LIMIT ${limitRows}` + `SELECT * FROM \`${metadata.tableReference.projectId}.${metadata.tableReference.datasetId}.${metadata.tableReference.tableId}\` LIMIT ${limitRows}` ); } @@ -175,9 +173,7 @@ export class BigQueryDbAdapter implements IDbAdapter { if (metadata.location.toUpperCase() !== location.toUpperCase()) { throw new Error( - `Cannot create dataset "${schema}" in location "${location}". It already exists in location "${ - metadata.location - }". Change your default dataset location or delete the existing dataset.` + `Cannot create dataset "${schema}" in location "${location}". It already exists in location "${metadata.location}".
Change your default dataset location or delete the existing dataset.` ); } } diff --git a/api/dbadapters/index.ts b/api/dbadapters/index.ts index f4a56056b..0aa4b8033 100644 --- a/api/dbadapters/index.ts +++ b/api/dbadapters/index.ts @@ -2,6 +2,7 @@ import { Credentials } from "@dataform/api/commands/credentials"; import { BigQueryDbAdapter } from "@dataform/api/dbadapters/bigquery"; import { RedshiftDbAdapter } from "@dataform/api/dbadapters/redshift"; import { SnowflakeDbAdapter } from "@dataform/api/dbadapters/snowflake"; +import { SQLDataWarehouseDBAdapter } from "@dataform/api/dbadapters/sqldatawarehouse"; import { dataform } from "@dataform/protos"; export type OnCancel = (handleCancel: () => void) => void; @@ -37,3 +38,4 @@ register("bigquery", BigQueryDbAdapter); register("postgres", RedshiftDbAdapter); register("redshift", RedshiftDbAdapter); register("snowflake", SnowflakeDbAdapter); +register("sqldatawarehouse", SQLDataWarehouseDBAdapter); diff --git a/api/dbadapters/sqldatawarehouse.ts b/api/dbadapters/sqldatawarehouse.ts new file mode 100644 index 000000000..6fea96000 --- /dev/null +++ b/api/dbadapters/sqldatawarehouse.ts @@ -0,0 +1,104 @@ +import { Credentials } from "@dataform/api/commands/credentials"; +import { IDbAdapter, OnCancel } from "@dataform/api/dbadapters/index"; +import { dataform } from "@dataform/protos"; +import { ConnectionPool } from "mssql"; + +const INFORMATION_SCHEMA_SCHEMA_NAME = "information_schema"; +const TABLE_NAME_COL_NAME = "table_name"; +const TABLE_SCHEMA_COL_NAME = "table_schema"; +const TABLE_TYPE_COL_NAME = "table_type"; +const COLUMN_NAME_COL_NAME = "column_name"; +const DATA_TYPE_COL_NAME = "data_type"; +const IS_NULLABLE_COL_NAME = "is_nullable"; + +export class SQLDataWarehouseDBAdapter implements IDbAdapter { + private pool: Promise<ConnectionPool>; + + constructor(credentials: Credentials) { + const sqlDataWarehouseCredentials = credentials as dataform.ISQLDataWarehouse; + this.pool = new Promise<ConnectionPool>((resolve, reject) => { + const conn = new ConnectionPool({ + server: sqlDataWarehouseCredentials.server, + port: sqlDataWarehouseCredentials.port, + user: sqlDataWarehouseCredentials.username, + password: sqlDataWarehouseCredentials.password, + database: sqlDataWarehouseCredentials.database, + options: { + encrypt: true + } + }).connect(); + conn + .then(pool => { + pool.on("error", err => { + throw new Error(err); + }); + resolve(conn); + }) + .catch(e => reject(e)); + }); + } + + public async execute(statement: string, onCancel?: OnCancel) { + const request = (await this.pool).request(); + if (onCancel) { + onCancel(() => request.cancel()); + } + return (await request.query(statement)).recordset; + } + + public async evaluate(statement: string) { + await this.execute(`explain ${statement}`); + } + + public async tables(): Promise<dataform.ITarget[]> { + const result = await this.execute( + `select ${TABLE_SCHEMA_COL_NAME}, ${TABLE_NAME_COL_NAME} from ${INFORMATION_SCHEMA_SCHEMA_NAME}.tables` + ); + return result.map(row => ({ + schema: row[TABLE_SCHEMA_COL_NAME], + name: row[TABLE_NAME_COL_NAME] + })); + } + + public async table(target: dataform.ITarget): Promise<dataform.ITableMetadata> { + const [tableData, columnData] = await Promise.all([ + this.execute( + `select ${TABLE_TYPE_COL_NAME} from ${INFORMATION_SCHEMA_SCHEMA_NAME}.tables + where ${TABLE_SCHEMA_COL_NAME} = '${target.schema}' AND ${TABLE_NAME_COL_NAME} = '${target.name}'` + ), + this.execute( + `select ${COLUMN_NAME_COL_NAME}, ${DATA_TYPE_COL_NAME}, ${IS_NULLABLE_COL_NAME} + from ${INFORMATION_SCHEMA_SCHEMA_NAME}.columns + where
${TABLE_SCHEMA_COL_NAME} = '${target.schema}' AND ${TABLE_NAME_COL_NAME} = '${target.name}'` ) ]); + + if (tableData.length === 0) { + throw new Error(`Could not find relation: ${target.schema}.${target.name}`); + } + + // The table exists. + return { + target, + type: tableData[0][TABLE_TYPE_COL_NAME] === "VIEW" ? "view" : "table", + fields: columnData.map(row => ({ + name: row[COLUMN_NAME_COL_NAME], + primitive: row[DATA_TYPE_COL_NAME], + flags: row[IS_NULLABLE_COL_NAME] && row[IS_NULLABLE_COL_NAME] === "YES" ? ["nullable"] : [] + })) + }; + } + + public async preview(target: dataform.ITarget, limitRows: number = 10): Promise<any[]> { + return this.execute(`SELECT TOP ${limitRows} * FROM "${target.schema}"."${target.name}"`); + } + + public async prepareSchema(schema: string): Promise<void> { + await this.execute( + `if not exists ( select schema_name from ${INFORMATION_SCHEMA_SCHEMA_NAME}.schemata where schema_name = '${schema}' ) + begin + exec sp_executesql N'create schema ${schema}' + end ` + ); + } +} diff --git a/assertion_utils/index.ts b/assertion_utils/index.ts index 494593dc1..749613250 100644 --- a/assertion_utils/index.ts +++ b/assertion_utils/index.ts @@ -19,12 +19,11 @@ export class DatasetAssertion { WITH base AS ( SELECT - ${this.groupCols.map((field, i) => `${field} as c_${i}`).join(", ")}, + ${this.groupCols.join(", ")}, SUM(1) as row_count FROM ${this.dataset} GROUP BY - ${this.groupCols.map((field, i) => `${i+1}`).join(", ")} - + ${this.groupCols.join(", ")} ) SELECT @@ -33,6 +32,6 @@ export class DatasetAssertion { base WHERE row_count > 1 - ` + `; } -} \ No newline at end of file +} diff --git a/cli/credentials.ts b/cli/credentials.ts index 8a6ca72bb..9f334d3ec 100644 --- a/cli/credentials.ts +++ b/cli/credentials.ts @@ -38,6 +38,22 @@ export function getRedshiftCredentials() { ); } +export function getSQLDataWarehouseCredentials(): dataform.ISQLDataWarehouse { + const server = question("Enter your server name (for example 'name.database.windows.net'):"); + const port = intQuestion("Enter your server port:", 1433); + const username = question("Enter your data warehouse user:"); + const password = passwordQuestion("Enter your data warehouse password:"); + const database = question("Enter the database name:"); + + return { + server, + port, + username, + password, + database + }; +} + export function getSnowflakeCredentials(): dataform.ISnowflake { const accountId = question( "Enter your Snowflake account identifier, including region (for example 'myaccount.us-east-1'):" diff --git a/cli/index.ts b/cli/index.ts index 5bfe4cbb8..68929b810 100644 --- a/cli/index.ts +++ b/cli/index.ts @@ -19,7 +19,8 @@ import { getBigQueryCredentials, getPostgresCredentials, getRedshiftCredentials, - getSnowflakeCredentials + getSnowflakeCredentials, + getSQLDataWarehouseCredentials } from "@dataform/cli/credentials"; import { actuallyResolve, assertPathExists, compiledGraphHasErrors } from "@dataform/cli/util"; import { createYargsCli, INamedOption } from "@dataform/cli/yargswrapper"; @@ -49,9 +50,7 @@ const projectDirMustExistOption = { assertPathExists(path.resolve(argv["project-dir"], "dataform.json")); } catch (e) { throw new Error( - `${ - argv["project-dir"] - } does not appear to be a dataform directory (missing dataform.json file).` + `${argv["project-dir"]} does not appear to be a dataform directory (missing dataform.json file).` ); } } @@ -206,9 +205,7 @@ const builtYargs = createYargsCli({ }, { format: "init-creds [project-dir]", - description: `Create a ${
credentials.CREDENTIALS_FILENAME - } file for dataform to use when accessing your warehouse.`, + description: `Create a ${credentials.CREDENTIALS_FILENAME} file for Dataform to use when accessing your warehouse.`, positionalOptions: [warehouseOption, projectDirMustExistOption], options: [ { @@ -232,6 +229,9 @@ const builtYargs = createYargsCli({ case "redshift": { return getRedshiftCredentials(); } + case "sqldatawarehouse": { + return getSQLDataWarehouseCredentials(); + } case "snowflake": { return getSnowflakeCredentials(); } diff --git a/core/adapters/base.ts b/core/adapters/base.ts index 455896e06..68dcc5d00 100644 --- a/core/adapters/base.ts +++ b/core/adapters/base.ts @@ -2,8 +2,7 @@ import { dataform } from "@dataform/protos"; export abstract class Adapter { public where(query: string, where: string) { - return `select * from ( - ${query}) + return `select * from (${query}) as subquery where ${where}`; } @@ -23,7 +22,7 @@ export abstract class Adapter { insert into ${this.resolveTarget(target)} (${columns.join(",")}) select ${columns.join(",")} - from (${query})`; + from (${query}) as insertions`; } public dropIfExists(target: dataform.ITarget, type: string) { diff --git a/core/adapters/index.ts b/core/adapters/index.ts index ccc85289b..fff026a7c 100644 --- a/core/adapters/index.ts +++ b/core/adapters/index.ts @@ -1,8 +1,9 @@ +import { BigQueryAdapter } from "@dataform/core/adapters/bigquery"; +import { RedshiftAdapter } from "@dataform/core/adapters/redshift"; +import { SnowflakeAdapter } from "@dataform/core/adapters/snowflake"; +import { SQLDataWarehouseAdapter } from "@dataform/core/adapters/sqldatawarehouse"; +import { Tasks } from "@dataform/core/tasks"; import { dataform } from "@dataform/protos"; -import { Tasks } from "../tasks"; -import { BigQueryAdapter } from "./bigquery"; -import { RedshiftAdapter } from "./redshift"; -import { SnowflakeAdapter } from "./snowflake"; export interface IAdapter { resolveTarget(target: dataform.ITarget): string; @@ -26,11 +27,16 @@ export enum WarehouseType { BIGQUERY = "bigquery", POSTGRES = "postgres", REDSHIFT = "redshift", - SNOWFLAKE = "snowflake" + SNOWFLAKE = "snowflake", + SQLDATAWAREHOUSE = "sqldatawarehouse" } +const CANCELLATION_SUPPORTED = [WarehouseType.BIGQUERY, WarehouseType.SQLDATAWAREHOUSE]; + export function supportsCancel(warehouseType: WarehouseType) { - return warehouseType === WarehouseType.BIGQUERY; + return CANCELLATION_SUPPORTED.some(w => { + return w === warehouseType; + }); } const requiredBigQueryWarehouseProps: Array<keyof dataform.IBigQuery> = [ @@ -52,12 +58,20 @@ const requiredSnowflakeWarehouseProps: Array<keyof dataform.ISnowflake> = [ "databaseName", "warehouse" ]; +const requiredSQLDataWarehouseProps: Array<keyof dataform.ISQLDataWarehouse> = [ + "server", + "port", + "username", + "password", + "database" +]; export const requiredWarehouseProps = { [WarehouseType.BIGQUERY]: requiredBigQueryWarehouseProps, [WarehouseType.POSTGRES]: requiredJdbcWarehouseProps, [WarehouseType.REDSHIFT]: requiredJdbcWarehouseProps, - [WarehouseType.SNOWFLAKE]: requiredSnowflakeWarehouseProps + [WarehouseType.SNOWFLAKE]: requiredSnowflakeWarehouseProps, + [WarehouseType.SQLDATAWAREHOUSE]: requiredSQLDataWarehouseProps }; const registry: { [warehouseType: string]: AdapterConstructor } = {}; @@ -80,3 +94,4 @@ register("bigquery", BigQueryAdapter); register("postgres", RedshiftAdapter); register("redshift", RedshiftAdapter); register("snowflake", SnowflakeAdapter); +register("sqldatawarehouse", SQLDataWarehouseAdapter); diff --git a/core/adapters/sqldatawarehouse.ts b/core/adapters/sqldatawarehouse.ts new file
mode 100644 index 000000000..cd415030f --- /dev/null +++ b/core/adapters/sqldatawarehouse.ts @@ -0,0 +1,111 @@ +import { Adapter } from "@dataform/core/adapters/base"; +import { IAdapter } from "@dataform/core/adapters/index"; +import { Task, Tasks } from "@dataform/core/tasks"; +import { dataform } from "@dataform/protos"; + +export class SQLDataWarehouseAdapter extends Adapter implements IAdapter { + public resolveTarget(target: dataform.ITarget) { + return `"${target.schema}"."${target.name}"`; + } + + public publishTasks( + table: dataform.ITable, + runConfig: dataform.IRunConfig, + tableMetadata: dataform.ITableMetadata + ): Tasks { + const tasks = Tasks.create(); + // Drop the existing view or table if we are changing its type. + if (tableMetadata && tableMetadata.type !== this.baseTableType(table.type)) { + tasks.add( + Task.statement(this.dropIfExists(table.target, this.oppositeTableType(table.type))) + ); + } + if (table.type === "incremental") { + if (runConfig.fullRefresh || !tableMetadata || tableMetadata.type === "view") { + tasks.addAll(this.createOrReplace(table, !!tableMetadata)); + } else { + // The table exists, insert new rows. + tasks.add( + Task.statement( + this.insertInto( + table.target, + tableMetadata.fields.map(f => f.name), + this.where(table.query, table.where) + ) + ) + ); + } + } else { + tasks.addAll(this.createOrReplace(table, !!tableMetadata)); + } + return tasks; + } + + public assertTasks( + assertion: dataform.IAssertion, + projectConfig: dataform.IProjectConfig + ): Tasks { + const target = + assertion.target || + dataform.Target.create({ + schema: projectConfig.assertionSchema, + name: assertion.name + }); + + return Tasks.create() + .add(Task.statement(this.dropIfExists(target, "view"))) + .add( + Task.statement(` + create view ${this.resolveTarget(target)} + as ${assertion.query}`) + ) + .add(Task.assertion(`select sum(1) as row_count from ${this.resolveTarget(target)}`)); + } + + public dropIfExists(target: dataform.ITarget, type: string) { + if (type === "view") { + return `drop ${this.baseTableType(type)} if exists ${this.resolveTarget(target)} `; + } + return `if object_id ('${this.resolveTarget( + target + )}','U') is not null drop table ${this.resolveTarget(target)}`; + } + + private createOrReplace(table: dataform.ITable, alreadyExists: boolean) { + if (table.type === "view") { + return Tasks.create().add( + Task.statement( + `${alreadyExists ? "alter" : "create"} view ${this.resolveTarget(table.target)} as ${ + table.query + }` + ) + ); + } + const tempTableTarget = dataform.Target.create({ + schema: table.target.schema, + name: table.target.name + "_temp" + }); + + return Tasks.create() + .add(Task.statement(this.dropIfExists(tempTableTarget, this.baseTableType(table.type)))) + .add(Task.statement(this.createTable(table, tempTableTarget))) + .add(Task.statement(this.dropIfExists(table.target, "table"))) + .add( + Task.statement( + `rename object ${this.resolveTarget(tempTableTarget)} to ${table.target.name} ` + ) + ); + } + + private createTable(table: dataform.ITable, target: dataform.ITarget) { + const distribution = + table.sqlDataWarehouse && table.sqlDataWarehouse.distribution + ? 
table.sqlDataWarehouse.distribution + : "ROUND_ROBIN"; // default + return `create table ${this.resolveTarget(target)} + with( + distribution = ${distribution} + ) + as ${table.query}`; + } +} diff --git a/core/session.ts b/core/session.ts index 452fa9be9..414e7eeb2 100644 --- a/core/session.ts +++ b/core/session.ts @@ -152,6 +152,14 @@ export class Session { if (actionOptions.sqlxConfig.redshift && !this.isDatasetType(actionOptions.sqlxConfig.type)) { this.compileError("Actions may only specify 'redshift: { ... }' if they create a dataset."); } + if ( + actionOptions.sqlxConfig.sqldatawarehouse && + !this.isDatasetType(actionOptions.sqlxConfig.type) + ) { + this.compileError( + "Actions may only specify 'sqldatawarehouse: { ... }' if they create a dataset." + ); + } if (actionOptions.sqlxConfig.bigquery && !this.isDatasetType(actionOptions.sqlxConfig.type)) { this.compileError("Actions may only specify 'bigquery: { ... }' if they create a dataset."); } diff --git a/core/table.ts b/core/table.ts index b935983b7..d2c81cf74 100644 --- a/core/table.ts +++ b/core/table.ts @@ -23,6 +23,7 @@ export const ignoredProps: { [TableTypes.INLINE]: [ "bigquery", "redshift", + "sqlDataWarehouse", "preOps", "postOps", "actionDescriptor", @@ -45,6 +46,7 @@ export interface TConfig { protected?: boolean; redshift?: dataform.IRedshiftOptions; bigquery?: dataform.IBigQueryOptions; + sqldatawarehouse?: dataform.ISQLDataWarehouseOptions; } export class Table { @@ -79,6 +81,9 @@ export class Table { if (config.bigquery) { this.bigquery(config.bigquery); } + if (config.sqldatawarehouse) { + this.sqldatawarehouse(config.sqldatawarehouse); + } if (config.tags) { this.tags(config.tags); } @@ -122,6 +127,11 @@ export class Table { return this; } + public sqldatawarehouse(sqlDataWarehouse: dataform.ISQLDataWarehouseOptions) { + this.proto.sqlDataWarehouse = dataform.SQLDataWarehouseOptions.create(sqlDataWarehouse); + return this; + } + public redshift(redshift: dataform.IRedshiftOptions) { this.proto.redshift = dataform.RedshiftOptions.create(redshift); return this; diff --git a/core/utils.ts b/core/utils.ts index b859c3abb..fb21ed0b0 100644 --- a/core/utils.ts +++ b/core/utils.ts @@ -1,6 +1,8 @@ import { dataform } from "@dataform/protos"; import { DistStyleTypes, ignoredProps, SortStyleTypes, TableTypes } from "./table"; +const SQL_DATA_WAREHOUSE_DIST_HASH_REGEXP = new RegExp("HASH\\s*\\(\\s*\\w*\\s*\\)\\s*"); + export function relativePath(path: string, base: string) { if (base.length == 0) { return path; @@ -181,6 +183,20 @@ export function validate(compiledGraph: dataform.ICompiledGraph): dataform.IGrap validationErrors.push(dataform.ValidationError.create({ message, actionName })); } + // sqldatawarehouse config + if (action.sqlDataWarehouse && action.sqlDataWarehouse.distribution) { + const distribution = action.sqlDataWarehouse.distribution.toUpperCase(); + + if ( + distribution !== "REPLICATE" && + distribution !== "ROUND_ROBIN" && + !SQL_DATA_WAREHOUSE_DIST_HASH_REGEXP.test(distribution) + ) { + const message = `Invalid value for sqldatawarehouse distribution: "${distribution}"`; + validationErrors.push(dataform.ValidationError.create({ message, actionName })); + } + } + // redshift config if (!!action.redshift) { if ( diff --git a/crossdb/index.ts b/crossdb/index.ts index 7cac57800..8f86eadd3 100644 --- a/crossdb/index.ts +++ b/crossdb/index.ts @@ -2,6 +2,7 @@ export function currentTimestampUTC(warehouse?: string): string { return ({ bigquery: "current_timestamp()", redshift: 
"current_timestamp::timestamp", - snowflake: "convert_timezone('UTC', current_timestamp())::timestamp" + snowflake: "convert_timezone('UTC', current_timestamp())::timestamp", + sqldatawarehouse: "CURRENT_TIMESTAMP" } as { [key: string]: string })[warehouse || (global as any).session.config.warehouse]; } diff --git a/package.json b/package.json index 657ec82cc..8dd2d547c 100644 --- a/package.json +++ b/package.json @@ -13,6 +13,7 @@ "@types/long": "^4.0.0", "@types/mocha": "^5.2.5", "@types/moo": "^0.5.0", + "@types/mssql": "^4.0.15", "@types/next": "^7.0.5", "@types/node": "^9.6.47", "@types/react": "^16.7.17", @@ -37,6 +38,7 @@ "minimist": "^1.2.0", "mocha": "^5.2.0", "moo": "^0.5.0", + "mssql": "^5.1.0", "next": "^8.0.4", "next-images": "^1.0.4", "node-redshift": "^0.1.5", diff --git a/protos/core.proto b/protos/core.proto index d48c87607..04953a0eb 100644 --- a/protos/core.proto +++ b/protos/core.proto @@ -56,6 +56,10 @@ message RedshiftOptions { string sort_style = 4; } +message SQLDataWarehouseOptions { + string distribution = 1; +} + message GraphErrors { repeated CompilationError compilation_errors = 1; repeated ValidationError validation_errors = 2; @@ -110,6 +114,7 @@ message Table { // Warehouse specific features. RedshiftOptions redshift = 21; BigQueryOptions bigquery = 22; + SQLDataWarehouseOptions sql_data_warehouse = 25; // Generated. string file_name = 18; diff --git a/protos/profiles.proto b/protos/profiles.proto index 9bd846469..719155965 100644 --- a/protos/profiles.proto +++ b/protos/profiles.proto @@ -30,3 +30,11 @@ message BigQuery { reserved 2; } + +message SQLDataWarehouse { + string server = 1; + int32 port = 2; + string username = 3; + string password = 4; + string database = 5; +} diff --git a/scripts/decode_credentials b/scripts/decode_credentials index 89fdde054..03c8609c1 100755 --- a/scripts/decode_credentials +++ b/scripts/decode_credentials @@ -1,17 +1,9 @@ #!/bin/bash -gcloud kms decrypt \ - --plaintext-file=test_credentials/bigquery.json \ - --ciphertext-file=test_credentials/bigquery.json.enc \ - --keyring=dataform-co-build --key=dataform-co-build --location=global - -gcloud kms decrypt \ - --plaintext-file=test_credentials/redshift.json \ - --ciphertext-file=test_credentials/redshift.json.enc \ - --keyring=dataform-co-build --key=dataform-co-build --location=global - - -gcloud kms decrypt \ - --plaintext-file=test_credentials/snowflake.json \ - --ciphertext-file=test_credentials/snowflake.json.enc \ - --keyring=dataform-co-build --key=dataform-co-build --location=global +for warehouse in bigquery redshift snowflake sqldatawarehouse +do + gcloud kms decrypt \ + --plaintext-file=test_credentials/${warehouse}.json \ + --ciphertext-file=test_credentials/${warehouse}.json.enc \ + --keyring=dataform-co-build --key=dataform-co-build --location=global +done diff --git a/test_credentials/.gitignore b/test_credentials/.gitignore index d2c988eaa..5f70ad974 100644 --- a/test_credentials/.gitignore +++ b/test_credentials/.gitignore @@ -1,5 +1,2 @@ # Ignore everything in here, except the build file and the gitignore file. 
-* -!BUILD -!.gitignore -!.enc +*.json diff --git a/test_credentials/sqldatawarehouse.json.enc b/test_credentials/sqldatawarehouse.json.enc new file mode 100644 index 0000000000000000000000000000000000000000..4622efc0119d74c36e2a3ab7b4343c9a95050573 GIT binary patch literal 271 zcmV+q0r36`BmkJa2q3sH6I-t`abV*#`TfG##7_%>L|a@|b=GJR4cs-CY!c=H0Etfz zshTh2v}b_YEpjxa992rif6Wwlc{0R{i5+=`gjx*r!*_-xdf&-F2@E`xma8M}0aD5l zgL&OE?R$E6d;tFDk&apGOe@QH3?DjmW5$bO1(;_{l6RNcW!e}F^3qt^m-J7A%<5?-nV(ew-!@-!c6 V_Q1HM3RNDb%};{tW7;W@H&%e7hIjw~ literal 0 HcmV?d00001 diff --git a/tests/api/api.spec.ts b/tests/api/api.spec.ts index cb2b61a65..8cdc2ed6b 100644 --- a/tests/api/api.spec.ts +++ b/tests/api/api.spec.ts @@ -309,9 +309,9 @@ describe("@dataform/api", () => { cleanSql( `insert into \`schema.incremental\` (existing_field) select existing_field from ( - select * from (select 1 as test) + select * from (select 1 as test) as subquery where true - )` + ) as insertions` ) ); }); diff --git a/tests/integration/BUILD b/tests/integration/BUILD index a95142f1f..b2616c22d 100644 --- a/tests/integration/BUILD +++ b/tests/integration/BUILD @@ -104,3 +104,32 @@ mocha_node_test( "@npm//mocha", ], ) + +ts_library( + name = "sqldatawarehouse_lib", + srcs = glob(["sqldatawarehouse.spec.ts"]), + deps = [ + ":utils", + "//api", + "//core", + "//protos", + "//tests/utils", + "@npm//@types/chai", + "@npm//@types/mocha", + "@npm//@types/node", + ], +) + +mocha_node_test( + name = "sqldatawarehouse", + srcs = [ + ":sqldatawarehouse_lib", + ], + data = [ + "//test_credentials", + "//tests/integration/sqldatawarehouse_project:files", + "//tests/integration/sqldatawarehouse_project:node_modules", + "@npm//chai", + "@npm//mocha", + ], +) diff --git a/tests/integration/sqldatawarehouse.spec.ts b/tests/integration/sqldatawarehouse.spec.ts new file mode 100644 index 000000000..1f0dc6a9b --- /dev/null +++ b/tests/integration/sqldatawarehouse.spec.ts @@ -0,0 +1,101 @@ +import * as dfapi from "@dataform/api"; +import * as dbadapters from "@dataform/api/dbadapters"; +import * as adapters from "@dataform/core/adapters"; +import { dataform } from "@dataform/protos"; +import { expect } from "chai"; +import { dropAllTables, getTableRows, keyBy } from "df/tests/integration/utils"; + +describe("@dataform/integration/sqldatawarehouse", () => { + it("run", async () => { + const credentials = dfapi.credentials.read( + "sqldatawarehouse", + "df/test_credentials/sqldatawarehouse.json" + ); + + const compiledGraph = await dfapi.compile({ + projectDir: "df/tests/integration/sqldatawarehouse_project" + }); + + expect(compiledGraph.graphErrors.compilationErrors).to.eql([]); + expect(compiledGraph.graphErrors.validationErrors).to.eql([]); + + const dbadapter = dbadapters.create(credentials, "sqldatawarehouse"); + const adapter = adapters.create(compiledGraph.projectConfig); + + // Drop all the tables before we do anything. + await dropAllTables(compiledGraph, adapter, dbadapter); + + // Run the tests. 
+ const testResults = await dfapi.test(credentials, "sqldatawarehouse", compiledGraph.tests); + expect(testResults).to.eql([ + { name: "successful", successful: true }, + { + name: "expected more rows than got", + successful: false, + messages: ["Expected 3 rows, but saw 2 rows."] + }, + { + name: "expected fewer columns than got", + successful: false, + messages: ['Expected columns "col1,col2,col3", but saw "col1,col2,col3,col4".'] + }, + { + name: "wrong columns", + successful: false, + messages: ['Expected columns "col1,col2,col3,col4", but saw "col1,col2,col3,col5".'] + }, + { + name: "wrong row contents", + successful: false, + messages: [ + 'For row 0 and column "col2": expected "1" (number), but saw "5" (number).', + 'For row 1 and column "col3": expected "6.5" (number), but saw "12" (number).', + 'For row 2 and column "col1": expected "sup?" (string), but saw "WRONG" (string).' + ] + } + ]); + + // Run the project. + let executionGraph = await dfapi.build(compiledGraph, {}, credentials); + let executedGraph = await dfapi.run(executionGraph, credentials).resultPromise(); + + const actionMap = keyBy(executedGraph.actions, v => v.name); + + // Check the status of the two assertions. + expect(actionMap.example_assertion_fail.status).equals(dataform.ActionExecutionStatus.FAILED); + expect(actionMap.example_assertion_pass.status).equals( + dataform.ActionExecutionStatus.SUCCESSFUL + ); + + // Check the status of the two uniqueness assertions. + expect(actionMap.example_assertion_uniqueness_fail.status).equals( + dataform.ActionExecutionStatus.FAILED + ); + expect(actionMap.example_assertion_uniqueness_fail.tasks[2].error).to.eql( + "Assertion failed: query returned 1 row(s)." + ); + expect(actionMap.example_assertion_uniqueness_pass.status).equals( + dataform.ActionExecutionStatus.SUCCESSFUL + ); + + // Check the data in the incremental table. + let incrementalTable = keyBy(compiledGraph.tables, t => t.name).example_incremental; + let incrementalRows = await getTableRows(incrementalTable.target, adapter, dbadapter); + expect(incrementalRows.length).equals(1); + + // Re-run some of the actions. + executionGraph = await dfapi.build( + compiledGraph, + { actions: ["example_incremental", "example_table", "example_view"] }, + credentials + ); + + executedGraph = await dfapi.run(executionGraph, credentials).resultPromise(); + expect(executedGraph.ok).equals(true); + + // Check there is an extra row in the incremental table. 
+ incrementalTable = keyBy(compiledGraph.tables, t => t.name).example_incremental; + incrementalRows = await getTableRows(incrementalTable.target, adapter, dbadapter); + expect(incrementalRows.length).equals(2); + }).timeout(60000); +}); diff --git a/tests/integration/sqldatawarehouse_project/BUILD b/tests/integration/sqldatawarehouse_project/BUILD new file mode 100644 index 000000000..e99b4d15e --- /dev/null +++ b/tests/integration/sqldatawarehouse_project/BUILD @@ -0,0 +1,20 @@ +package(default_visibility = ["//tests:__subpackages__"]) + +load("//tools:node_modules.bzl", "node_modules") + +filegroup( + name = "files", + srcs = glob([ + "**/*.*", + ]), +) + +node_modules( + name = "node_modules", + deps = [ + "//assertion_utils:package_tar", + "//core:package_tar", + "//crossdb:package_tar", + "//protos:package_tar", + ], +) diff --git a/tests/integration/sqldatawarehouse_project/dataform.json b/tests/integration/sqldatawarehouse_project/dataform.json new file mode 100644 index 000000000..07384d5b6 --- /dev/null +++ b/tests/integration/sqldatawarehouse_project/dataform.json @@ -0,0 +1,5 @@ +{ + "warehouse": "sqldatawarehouse", + "defaultSchema": "df_integration_test", + "assertionSchema": "df_integration_test_assertions" +} diff --git a/tests/integration/sqldatawarehouse_project/definitions/example_assertion_fail.sqlx b/tests/integration/sqldatawarehouse_project/definitions/example_assertion_fail.sqlx new file mode 100644 index 000000000..fdc9055a2 --- /dev/null +++ b/tests/integration/sqldatawarehouse_project/definitions/example_assertion_fail.sqlx @@ -0,0 +1,3 @@ +config { type: "assertion" } + +select * from ${ref("sample_data")} diff --git a/tests/integration/sqldatawarehouse_project/definitions/example_assertion_pass.sqlx b/tests/integration/sqldatawarehouse_project/definitions/example_assertion_pass.sqlx new file mode 100644 index 000000000..0fe862e05 --- /dev/null +++ b/tests/integration/sqldatawarehouse_project/definitions/example_assertion_pass.sqlx @@ -0,0 +1,5 @@ +config { type: "assertion" } + +js { const assertionUtils = require("@dataform/assertion_utils"); } + +${assertionUtils.forDataset(ref("sample_data")).groupedBy(["val"]).getUniqueRowQuery()} diff --git a/tests/integration/sqldatawarehouse_project/definitions/example_assertion_uniqueness_fail.sqlx b/tests/integration/sqldatawarehouse_project/definitions/example_assertion_uniqueness_fail.sqlx new file mode 100644 index 000000000..5252a28fd --- /dev/null +++ b/tests/integration/sqldatawarehouse_project/definitions/example_assertion_uniqueness_fail.sqlx @@ -0,0 +1,5 @@ +config { type: "assertion" } + +js { const assertionUtils = require("@dataform/assertion_utils"); } + +${assertionUtils.forDataset(ref("sample_data_2")).groupedBy(["val1"]).getUniqueRowQuery()} diff --git a/tests/integration/sqldatawarehouse_project/definitions/example_assertion_uniqueness_pass.sqlx b/tests/integration/sqldatawarehouse_project/definitions/example_assertion_uniqueness_pass.sqlx new file mode 100644 index 000000000..6816d5a12 --- /dev/null +++ b/tests/integration/sqldatawarehouse_project/definitions/example_assertion_uniqueness_pass.sqlx @@ -0,0 +1,5 @@ +config { type: "assertion" } + +js { const assertionUtils = require("@dataform/assertion_utils"); } + +${assertionUtils.forDataset(ref("sample_data_2")).groupedBy(["val1", "val2"]).getUniqueRowQuery()} diff --git a/tests/integration/sqldatawarehouse_project/definitions/example_incremental.sqlx b/tests/integration/sqldatawarehouse_project/definitions/example_incremental.sqlx new file mode 
100644 index 000000000..7a2f835ad --- /dev/null +++ b/tests/integration/sqldatawarehouse_project/definitions/example_incremental.sqlx @@ -0,0 +1,13 @@ +config { + type: "incremental" +} + +js { + const crossdb = require("@dataform/crossdb"); +} + +select ${crossdb.currentTimestampUTC()} as ts + +incremental_where { + ts > (select max(ts) from ${self()}) or (select max(ts) from ${self()}) is null +} diff --git a/tests/integration/sqldatawarehouse_project/definitions/example_table.sqlx b/tests/integration/sqldatawarehouse_project/definitions/example_table.sqlx new file mode 100644 index 000000000..ae57de696 --- /dev/null +++ b/tests/integration/sqldatawarehouse_project/definitions/example_table.sqlx @@ -0,0 +1,3 @@ +config { type: "table" } + +select * from ${ref("sample_data")} as sample_data diff --git a/tests/integration/sqldatawarehouse_project/definitions/example_view.sqlx b/tests/integration/sqldatawarehouse_project/definitions/example_view.sqlx new file mode 100644 index 000000000..d303e6a8a --- /dev/null +++ b/tests/integration/sqldatawarehouse_project/definitions/example_view.sqlx @@ -0,0 +1,3 @@ +config { type: "view" } + +select * from ${ref("sample_data")} diff --git a/tests/integration/sqldatawarehouse_project/definitions/sample_data.sqlx b/tests/integration/sqldatawarehouse_project/definitions/sample_data.sqlx new file mode 100644 index 000000000..9658dc382 --- /dev/null +++ b/tests/integration/sqldatawarehouse_project/definitions/sample_data.sqlx @@ -0,0 +1,5 @@ +config { type: "view" } + +select 1 as val union all +select 2 as val union all +select 3 as val diff --git a/tests/integration/sqldatawarehouse_project/definitions/sample_data_2.sqlx b/tests/integration/sqldatawarehouse_project/definitions/sample_data_2.sqlx new file mode 100644 index 000000000..9efa68529 --- /dev/null +++ b/tests/integration/sqldatawarehouse_project/definitions/sample_data_2.sqlx @@ -0,0 +1,5 @@ +config { type: "view" } + +select 1 as val1, 1 as val2 union all +select 1 as val1, 2 as val2 union all +select 1 as val1, 3 as val2 diff --git a/tests/integration/sqldatawarehouse_project/definitions/test.js b/tests/integration/sqldatawarehouse_project/definitions/test.js new file mode 100644 index 000000000..b5893256e --- /dev/null +++ b/tests/integration/sqldatawarehouse_project/definitions/test.js @@ -0,0 +1,88 @@ +test("successful") + .dataset("example_table") + .input( + "sample_data", + ` + select 'hi' as col1, 1 as col2, 3.5 as col3, cast(1 as bit) as col4 union all + select 'ben' as col1, 2 as col2, 6.5 as col3, cast(0 as bit) as col4 union all + select 'sup?' as col1, 3 as col2, 9.5 as col3, cast(1 as bit) as col4 + ` + ) + .expect( + ` + select 'hi' as col1, 1 as col2, 3.5 as col3, cast(1 as bit) as col4 union all + select 'ben' as col1, 2 as col2, 6.5 as col3, cast(0 as bit) as col4 union all + select 'sup?' as col1, 3 as col2, 9.5 as col3, cast(1 as bit) as col4 + ` + ); + +test("expected more rows than got") + .dataset("example_table") + .input( + "sample_data", + ` + select 'hi' as col1, 1 as col2, 3.5 as col3, cast(1 as bit) as col4 union all + select 'ben' as col1, 2 as col2, 6.5 as col3, cast(0 as bit) as col4 + ` + ) + .expect( + ` + select 'hi' as col1, 1 as col2, 3.5 as col3, cast(1 as bit) as col4 union all + select 'ben' as col1, 2 as col2, 6.5 as col3, cast(0 as bit) as col4 union all + select 'sup?'
as col1, 3 as col2, 9.5 as col3, cast(1 as bit) as col4 + ` + ); + +test("expected fewer columns than got") + .dataset("example_table") + .input( + "sample_data", + ` + select 'hi' as col1, 1 as col2, 3.5 as col3, cast(1 as bit) as col4 union all + select 'ben' as col1, 2 as col2, 6.5 as col3, cast(0 as bit) as col4 union all + select 'sup?' as col1, 3 as col2, 9.5 as col3, cast(1 as bit) as col4 + ` + ) + .expect( + ` + select 'hi' as col1, 1 as col2, 3.5 as col3 union all + select 'ben' as col1, 2 as col2, 6.5 as col3 union all + select 'sup?' as col1, 3 as col2, 9.5 as col3 + ` + ); + +test("wrong columns") + .dataset("example_table") + .input( + "sample_data", + ` + select 'hi' as col1, 1 as col2, 3.5 as col3, cast(1 as bit) as col5 union all + select 'ben' as col1, 2 as col2, 6.5 as col3, cast(0 as bit) as col5 union all + select 'sup?' as col1, 3 as col2, 9.5 as col3, cast(1 as bit) as col5 + ` + ) + .expect( + ` + select 'hi' as col1, 1 as col2, 3.5 as col3, cast(1 as bit) as col4 union all + select 'ben' as col1, 2 as col2, 6.5 as col3, cast(0 as bit) as col4 union all + select 'sup?' as col1, 3 as col2, 9.5 as col3, cast(1 as bit) as col4 + ` + ); + +test("wrong row contents") + .dataset("example_table") + .input( + "sample_data", + ` + select 'hi' as col1, 5 as col2, 3.5 as col3, cast(1 as bit) as col4 union all + select 'ben' as col1, 2 as col2, 12 as col3, cast(0 as bit) as col4 union all + select 'WRONG' as col1, 3 as col2, 9.5 as col3, cast(1 as bit) as col4 + ` + ) + .expect( + ` + select 'hi' as col1, 1 as col2, 3.5 as col3, cast(1 as bit) as col4 union all + select 'ben' as col1, 2 as col2, 6.5 as col3, cast(0 as bit) as col4 union all + select 'sup?' as col1, 3 as col2, 9.5 as col3, cast(1 as bit) as col4 + ` + ); diff --git a/version.bzl b/version.bzl index bf797c30c..ba3255fc2 100644 --- a/version.bzl +++ b/version.bzl @@ -1,3 +1,3 @@ # NOTE: If you change the format of this line, you must change the bash command # in /scripts/publish to extract the version string correctly.
-DF_VERSION = "1.1.2" +DF_VERSION = "1.2.0" diff --git a/yarn.lock b/yarn.lock index 88b333b65..55791249a 100644 --- a/yarn.lock +++ b/yarn.lock @@ -1041,6 +1041,13 @@ resolved "https://registry.yarnpkg.com/@types/moo/-/moo-0.5.0.tgz#83fd6a9b0fa12fc7101294db69a9982a9f8d9360" integrity sha512-ldhNEcZeco2Fk38z1vhgjmn2K5wyiFrJK2nLClTx/FqsFuBvqIHkzbZUEzstISobpKeMK8oRCiK/zV3zcTLOPw== +"@types/mssql@^4.0.15": + version "4.0.15" + resolved "https://registry.yarnpkg.com/@types/mssql/-/mssql-4.0.15.tgz#41322c9f1ea6e5dc8469756b67c1c1531b8a3f4d" + integrity sha512-WRmVGqBQl6Z2/LHr096iANOpa/1aFOxrlVRH/WtNZg+QpaxMdeMBBFJlI4m/zRcMWjSryJ0dvtTZtne0zxca6w== + dependencies: + "@types/node" "*" + "@types/next-server@*": version "8.1.1" resolved "https://registry.yarnpkg.com/@types/next-server/-/next-server-8.1.1.tgz#1570d5079cf7e82e77f48826024c6e2797cddda7" @@ -1088,6 +1095,11 @@ resolved "https://registry.yarnpkg.com/@types/node/-/node-10.14.8.tgz#fe444203ecef1162348cd6deb76c62477b2cc6e9" integrity sha512-I4+DbJEhLEg4/vIy/2gkWDvXBOOtPKV9EnLhYjMoqxcRW+TTZtUftkHktz/a8suoD5mUL7m6ReLrkPvSsCQQmw== +"@types/node@^8.0.47": + version "8.10.51" + resolved "https://registry.yarnpkg.com/@types/node/-/node-8.10.51.tgz#80600857c0a47a8e8bafc2dae6daed6db58e3627" + integrity sha512-cArrlJp3Yv6IyFT/DYe+rlO8o3SIHraALbBW/+CcCYW/a9QucpLI+n2p4sRxAvl2O35TiecpX2heSZtJjvEO+Q== + "@types/node@^9.6.47": version "9.6.49" resolved "https://registry.yarnpkg.com/@types/node/-/node-9.6.49.tgz#ab4df6e505db088882c8ce5417ae0bc8cbb7a8a6" @@ -1573,6 +1585,21 @@ acorn@^6.0.5: resolved "https://registry.yarnpkg.com/acorn/-/acorn-6.1.1.tgz#7d25ae05bb8ad1f9b699108e1094ecd7884adc1f" integrity sha512-jPTiwtOxaHNaAPg/dmrJ/beuzLRnXtB0kQPQ8JpotKJgTB6rX6c8mlf315941pyjBSaPg8NHXS9fhP4u17DpGA== +adal-node@^0.1.22: + version "0.1.28" + resolved "https://registry.yarnpkg.com/adal-node/-/adal-node-0.1.28.tgz#468c4bb3ebbd96b1270669f4b9cba4e0065ea485" + integrity sha1-RoxLs+u9lrEnBmn0ucuk4AZepIU= + dependencies: + "@types/node" "^8.0.47" + async ">=0.6.0" + date-utils "*" + jws "3.x.x" + request ">= 2.52.0" + underscore ">= 1.3.1" + uuid "^3.1.0" + xmldom ">= 0.1.x" + xpath.js "~1.1.0" + agent-base@^2.1.1: version "2.1.1" resolved "https://registry.yarnpkg.com/agent-base/-/agent-base-2.1.1.tgz#d6de10d5af6132d5bd692427d46fc538539094c7" @@ -1905,6 +1932,11 @@ async-sema@2.2.0: resolved "https://registry.yarnpkg.com/async-sema/-/async-sema-2.2.0.tgz#b0de2bb11a2560d369aca975d24650cc05304979" integrity sha512-bI/JChXscebgVmAwTk+EQF4LIr4Hy6W5+B2AG1I9CmkfABmFEYyRrJGutfc816/Hd6LRzHFxpjGqZ/idwhJpRg== +async@>=0.6.0: + version "3.1.0" + resolved "https://registry.yarnpkg.com/async/-/async-3.1.0.tgz#42b3b12ae1b74927b5217d8c0016baaf62463772" + integrity sha512-4vx/aaY6j/j3Lw3fbCHNWP0pPaTCew3F6F3hYyl/tHs/ndmV1q7NW9T5yuJ2XAGwdQrP+6Wu20x06U4APo/iQQ== + async@^1.5.2: version "1.5.2" resolved "https://registry.yarnpkg.com/async/-/async-1.5.2.tgz#ec6a61ae56480c0c3cb241c95618e20892f9672a" @@ -2057,6 +2089,11 @@ bcrypt-pbkdf@^1.0.0: dependencies: tweetnacl "^0.14.3" +big-number@1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/big-number/-/big-number-1.0.0.tgz#a0277607a0adb06492d3099546ef0d547785df18" + integrity sha512-cHUzdT+mMXd1ozht8n5ZwBlNiPO/4zCqqkyp3lF1TMPsRJLXUbQ7cKnfXRkrW475H5SOtSOP0HFeihNbpa53MQ== + big.js@^3.1.3: version "3.2.0" resolved "https://registry.yarnpkg.com/big.js/-/big.js-3.2.0.tgz#a5fc298b81b9e0dca2e458824784b65c52ba588e" @@ -2082,6 +2119,14 @@ binary-extensions@^1.0.0: resolved 
"https://registry.yarnpkg.com/binary-extensions/-/binary-extensions-1.13.1.tgz#598afe54755b2868a5330d2aff9d4ebb53209b65" integrity sha512-Un7MIEDdUC5gNpcGDV97op1Ywk748MpHcFTHoYs6qnj1Z3j7I53VG3nwZhKzoBZmbdRNnb6WRdFlwl7tSDuZGw== +bl@^2.0.1: + version "2.2.0" + resolved "https://registry.yarnpkg.com/bl/-/bl-2.2.0.tgz#e1a574cdf528e4053019bb800b041c0ac88da493" + integrity sha512-wbgvOpqopSr7uq6fJrLH8EsvYMJf9gzfo2jCsL2eTy75qXPukA4pCgHamOQkZtY5vmfVtjB+P3LNlMHW5CEZXA== + dependencies: + readable-stream "^2.3.5" + safe-buffer "^5.1.1" + bluebird@^3.5.0, bluebird@^3.5.1, bluebird@^3.5.3, bluebird@^3.5.4: version "3.5.5" resolved "https://registry.yarnpkg.com/bluebird/-/bluebird-3.5.5.tgz#a8d0afd73251effbbd5fe384a77d73003c17a71f" @@ -2912,6 +2957,11 @@ date-now@^0.1.4: resolved "https://registry.yarnpkg.com/date-now/-/date-now-0.1.4.tgz#eaf439fd4d4848ad74e5cc7dbef200672b9e345b" integrity sha1-6vQ5/U1ISK105cx9vvIAZyueNFs= +date-utils@*: + version "1.2.21" + resolved "https://registry.yarnpkg.com/date-utils/-/date-utils-1.2.21.tgz#61fb16cdc1274b3c9acaaffe9fc69df8720a2b64" + integrity sha1-YfsWzcEnSzyayq/+n8ad+HIKK2Q= + dateformat@^1.0.12: version "1.0.12" resolved "https://registry.yarnpkg.com/dateformat/-/dateformat-1.0.12.tgz#9f124b67594c937ff706932e4a642cca8dbbfee9" @@ -3049,7 +3099,7 @@ delegates@^1.0.0: resolved "https://registry.yarnpkg.com/delegates/-/delegates-1.0.0.tgz#84c6e159b81904fdca59a0ef44cd870d31250f9a" integrity sha1-hMbhWbgZBP3KWaDvRM2HDTElD5o= -depd@~1.1.1, depd@~1.1.2: +depd@^1.1.2, depd@~1.1.1, depd@~1.1.2: version "1.1.2" resolved "https://registry.yarnpkg.com/depd/-/depd-1.1.2.tgz#9bcd52e14c097763e749b274c4346ed2e560b5a9" integrity sha1-m81S4UwJd2PnSbJ0xDRu0uVgtak= @@ -3751,6 +3801,11 @@ generic-pool@2.4.3: resolved "https://registry.yarnpkg.com/generic-pool/-/generic-pool-2.4.3.tgz#780c36f69dfad05a5a045dd37be7adca11a4f6ff" integrity sha1-eAw29p360FpaBF3Te+etyhGk9v8= +generic-pool@^3.6.1: + version "3.7.1" + resolved "https://registry.yarnpkg.com/generic-pool/-/generic-pool-3.7.1.tgz#36fe5bb83e7e0e032e5d32cd05dc00f5ff119aa8" + integrity sha512-ug6DAZoNgWm6q5KhPFA+hzXfBLFQu5sTXxPpv44DmE0A2g+CiHoq9LTVdkXpZMkYVMoGw83F6W+WT0h0MFMK/w== + get-caller-file@^1.0.1: version "1.0.3" resolved "https://registry.yarnpkg.com/get-caller-file/-/get-caller-file-1.0.3.tgz#f978fa4c90d1dfe7ff2d6beda2a515e713bdcf4a" @@ -4143,7 +4198,7 @@ https-proxy-agent@^2.2.1: agent-base "^4.1.0" debug "^3.1.0" -iconv-lite@^0.4.4, iconv-lite@~0.4.13: +iconv-lite@^0.4.23, iconv-lite@^0.4.4, iconv-lite@~0.4.13: version "0.4.24" resolved "https://registry.yarnpkg.com/iconv-lite/-/iconv-lite-0.4.24.tgz#2022b4b25fbddc21d2f524974a474aafe733908b" integrity sha512-v3MXnZAcvnywkTUEZomIActle7RXXeedOR31wwl7VlyoXO4Qi9arvSenNQWne1TcRwhCL1HwLI21bEqdpj8/rA== @@ -4741,7 +4796,7 @@ jwa@^1.4.1: ecdsa-sig-formatter "1.0.11" safe-buffer "^5.0.1" -jws@^3.1.5: +jws@3.x.x, jws@^3.1.5: version "3.2.2" resolved "https://registry.yarnpkg.com/jws/-/jws-3.2.2.tgz#001099f3639468c9414000e99995fa52fb478304" integrity sha512-YHlZCB6lMTllWDtSPHz/ZXTsi8S00usEV6v1tjq8tOUZzw7DpSDWVXjXDre6ed1w/pd495ODpHZYSdkRTsa0HA== @@ -5408,6 +5463,15 @@ ms@^2.1.1: resolved "https://registry.yarnpkg.com/ms/-/ms-2.1.2.tgz#d09d1f357b443f493382a8eb3ccd183872ae6009" integrity sha512-sGkPx+VjMtmA6MX27oA4FBFELFCZZ4S4XqeGOXCv68tT+jb3vk/RyaKWP0PTKyWtmLSM0b+adUTEvbs1PEaH2w== +mssql@^5.1.0: + version "5.1.0" + resolved "https://registry.yarnpkg.com/mssql/-/mssql-5.1.0.tgz#a06d75866802a8e7e8b69df6a2dc7d2ada82bc4b" + integrity 
sha512-eHrqRWCEBaXo48y2ZBaDleFvrWm2vYm6dNm1ci0XLYxm6kUb4KRsvjl74iKFhfYyuF9z6qzmTe/QmoQk+YVcVw== + dependencies: + debug "^3.2.6" + generic-pool "^3.6.1" + tedious "^4.2.0" + nan@^2.12.1: version "2.14.0" resolved "https://registry.yarnpkg.com/nan/-/nan-2.14.0.tgz#7818f722027b2459a86f0295d434d1fc2336c52c" @@ -5430,6 +5494,11 @@ nanomatch@^1.2.9: snapdragon "^0.8.1" to-regex "^3.0.1" +native-duplexpair@^1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/native-duplexpair/-/native-duplexpair-1.0.0.tgz#7899078e64bf3c8a3d732601b3d40ff05db58fa0" + integrity sha1-eJkHjmS/PIo9cyYBs9QP8F21j6A= + needle@^2.2.1: version "2.4.0" resolved "https://registry.yarnpkg.com/needle/-/needle-2.4.0.tgz#6833e74975c444642590e15a750288c5f939b57c" @@ -6695,7 +6764,7 @@ read-pkg@^2.0.0: normalize-package-data "^2.3.2" path-type "^2.0.0" -"readable-stream@1 || 2", readable-stream@^2.0.0, readable-stream@^2.0.1, readable-stream@^2.0.2, readable-stream@^2.0.6, readable-stream@^2.1.5, readable-stream@^2.2.2, readable-stream@^2.3.3, readable-stream@^2.3.6, readable-stream@~2.3.6: +"readable-stream@1 || 2", readable-stream@^2.0.0, readable-stream@^2.0.1, readable-stream@^2.0.2, readable-stream@^2.0.6, readable-stream@^2.1.5, readable-stream@^2.2.2, readable-stream@^2.3.3, readable-stream@^2.3.5, readable-stream@^2.3.6, readable-stream@~2.3.6: version "2.3.6" resolved "https://registry.yarnpkg.com/readable-stream/-/readable-stream-2.3.6.tgz#b11c27d88b8ff1fbe070643cf94b0c79ae1b0aaf" integrity sha512-tQtKA9WIAhBF3+VLAseyMqZeBjW0AHJoxOtYqSUZNJxauErmLbVm2FW1y+J/YA9dUrAC39ITejlZWhVIwawkKw== @@ -6708,7 +6777,7 @@ read-pkg@^2.0.0: string_decoder "~1.1.1" util-deprecate "~1.0.1" -readable-stream@^3.1.1: +readable-stream@^3.0.3, readable-stream@^3.1.1: version "3.4.0" resolved "https://registry.yarnpkg.com/readable-stream/-/readable-stream-3.4.0.tgz#a51c26754658e0a3c21dbf59163bd45ba6f447fc" integrity sha512-jItXPLmrSR8jmTRmRWJXCnGJsfy85mB3Wd/uINMXA65yrnFo0cPClFIUWzo2najVNSl+mx7/4W8ttlLWJe99pQ== @@ -7012,7 +7081,7 @@ replace-ext@1.0.0: resolved "https://registry.yarnpkg.com/replace-ext/-/replace-ext-1.0.0.tgz#de63128373fcbf7c3ccfa4de5a480c45a67958eb" integrity sha1-3mMSg3P8v3w8z6TeWkgMRaZ5WOs= -request@^2.88.0: +"request@>= 2.52.0", request@^2.88.0: version "2.88.0" resolved "https://registry.yarnpkg.com/request/-/request-2.88.0.tgz#9c2fca4f7d35b592efe57c7f0a55e81052124fef" integrity sha512-NAqBSrijGLZdM0WZNsInLJpkJokL72XYjUpnB0iwsRgxh7dB6COrHnTBNwN0E+lHDAJzu7kLAkDeY08z2/A0hg== @@ -7496,6 +7565,11 @@ split@^1.0.0: dependencies: through "2" +sprintf-js@^1.1.1: + version "1.1.2" + resolved "https://registry.yarnpkg.com/sprintf-js/-/sprintf-js-1.1.2.tgz#da1765262bf8c0f571749f2ad6c26300207ae673" + integrity sha512-VE0SOVEHCk7Qc8ulkWw3ntAzXuqf7S2lvwQaDLRnUeIEaKNQJzV6BwmLKhOqT61aGhfUMrXeaBk+oDGCzvhcug== + sprintf-js@~1.0.2: version "1.0.3" resolved "https://registry.yarnpkg.com/sprintf-js/-/sprintf-js-1.0.3.tgz#04e6926f662895354f3dd015203633b857297e2c" @@ -7815,6 +7889,21 @@ tar@^4: safe-buffer "^5.1.2" yallist "^3.0.3" +tedious@^4.2.0: + version "4.2.0" + resolved "https://registry.yarnpkg.com/tedious/-/tedious-4.2.0.tgz#ced27d46c3e3c2f643888bb62f2ea4f463650793" + integrity sha512-Py59XmvMcYWdjc1qyXDsbBwQE3yM8CJzuDnagjRpwjgndaBQXBULDI3D6OxKClbTNxA3qaLBFd9DjfV+is3AYA== + dependencies: + adal-node "^0.1.22" + big-number "1.0.0" + bl "^2.0.1" + depd "^1.1.2" + iconv-lite "^0.4.23" + native-duplexpair "^1.0.0" + punycode "^2.1.0" + readable-stream "^3.0.3" + sprintf-js "^1.1.1" + teeny-request@^3.11.3: version "3.11.3" 
resolved "https://registry.yarnpkg.com/teeny-request/-/teeny-request-3.11.3.tgz#335c629f7645e5d6599362df2f3230c4cbc23a55" @@ -8231,7 +8320,7 @@ underscore@1.4.x: resolved "https://registry.yarnpkg.com/underscore/-/underscore-1.4.4.tgz#61a6a32010622afa07963bf325203cf12239d604" integrity sha1-YaajIBBiKvoHljvzJSA88SI51gQ= -underscore@~1.9.1: +"underscore@>= 1.3.1", underscore@~1.9.1: version "1.9.1" resolved "https://registry.yarnpkg.com/underscore/-/underscore-1.9.1.tgz#06dce34a0e68a7babc29b365b8e74b8925203961" integrity sha512-5/4etnCkd9c8gwgowi5/om/mYO5ajCaOgdzj/oW+0eQV9WxKBDZw5+ycmKmeaTXjInS/W0BzpGLo2xR2aBwZdg== @@ -8478,7 +8567,7 @@ util@^0.11.0: dependencies: inherits "2.0.3" -uuid@^3.3.2: +uuid@^3.1.0, uuid@^3.3.2: version "3.3.2" resolved "https://registry.yarnpkg.com/uuid/-/uuid-3.3.2.tgz#1b4af4955eb3077c501c23872fc6513811587131" integrity sha512-yXJmeNaw3DnnKAOKJE51sL/ZaYfWJRl1pK9dr19YFCu0ObS231AB1/LbqTKRAQ5kw8A90rA6fr4riOUpTZvQZA== @@ -8763,6 +8852,16 @@ xmlcreate@^2.0.0: resolved "https://registry.yarnpkg.com/xmlcreate/-/xmlcreate-2.0.1.tgz#2ec38bd7b708d213fd1a90e2431c4af9c09f6a52" integrity sha512-MjGsXhKG8YjTKrDCXseFo3ClbMGvUD4en29H2Cev1dv4P/chlpw6KdYmlCWDkhosBVKRDjM836+3e3pm1cBNJA== +"xmldom@>= 0.1.x": + version "0.1.27" + resolved "https://registry.yarnpkg.com/xmldom/-/xmldom-0.1.27.tgz#d501f97b3bdb403af8ef9ecc20573187aadac0e9" + integrity sha1-1QH5ezvbQDr4757MIFcxh6rawOk= + +xpath.js@~1.1.0: + version "1.1.0" + resolved "https://registry.yarnpkg.com/xpath.js/-/xpath.js-1.1.0.tgz#3816a44ed4bb352091083d002a383dd5104a5ff1" + integrity sha512-jg+qkfS4K8E7965sqaUl8mRngXiKb3WZGfONgE18pr03FUQiuSV6G+Ej4tS55B+rIQSFEIw3phdVAQ4pPqNWfQ== + xtend@^4.0.0, xtend@^4.0.1, xtend@~4.0.1: version "4.0.1" resolved "https://registry.yarnpkg.com/xtend/-/xtend-4.0.1.tgz#a5c6d532be656e23db820efb943a1f04998d63af"