diff --git a/CHANGELOG.md b/CHANGELOG.md index 1ae26fe..897953e 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -2,6 +2,54 @@ All notable changes to this project will be documented in this file. +## v2.0.0 - 2023-11-15 [BREAKING CHANGES] + +We migrated from `clickhouse` to `@clickhouse/client` for stability and better maintainability. + +This means, you need to replace old configurations: + +```ts +import {ClickHouse} from 'clickhouse'; +import winston from 'winston'; +import {QueryBuilder} from 'clickhouse-query'; + +const clickhouse = new ClickHouse({ + url: 'http://localhost', + port: 8123, + basicAuth: {username: 'user', password: 'password'}, + format: 'json', + raw: false, +}); +const logger = winston.createLogger(); // not required, you can pass as null +const builder = new QueryBuilder(clickhouse, logger); +``` + +With the following: + +```ts +import {createClient} from '@clickhouse/client' +import winston from 'winston'; +import {QueryBuilder} from 'clickhouse-query'; + +// Configuration documentation - https://clickhouse.com/docs/en/integrations/language-clients/javascript#configuration +const clickhouse = createClient({ + host: 'http://localhost:8123', + username: 'user', + password: 'password', + database: 'my_database', + application: 'my_app' +}); +const logger = winston.createLogger(); // not required, you can pass null if no logging required +const builder = new QueryBuilder(clickhouse, logger); +``` + +- `AlterTableQuery::execute()` now DOES NOT accept generic and simply returns `Promise` +- `CreateTableQuery::execute()` now DOES NOT accept generic and simply returns `Promise` +- `DeleteQuery::execute()` now DOES NOT accept generic and simply returns `Promise` +- `InsertQuery::execute()` now DOES NOT accept generic and simply returns `Promise` + + + ## v1.7.5 - 2023-03-05 ### Added diff --git a/README.md b/README.md index c369cb2..92be3a1 100644 --- a/README.md +++ b/README.md @@ -14,11 +14,6 @@ ClickHouse Query is intuitive query builder to 
simplify usage of [ClickHouse](ht - [Usage](#usage) * [Installation](#installation) * [Quick start](#quick-start) -- [CREATE TABLE](#create-table) -- [INSERT](#insert) -- [ALTER TABLE](#alter-table) -- [DELETE](#delete) -- [UPDATE](#update) - [SELECT](#select) * [FINAL](#final) * [FROM](#from) @@ -27,9 +22,14 @@ ClickHouse Query is intuitive query builder to simplify usage of [ClickHouse](ht * [JOIN](#join) * [LIMIT/OFFSET](#limitoffset) * [WITH](#with) - * [Helper Functions](#helper-functions) - + [fx](#fx) - + [schema](#schema) +- [INSERT](#insert) +- [DELETE](#delete) +- [UPDATE](#update) +- [CREATE TABLE](#create-table) +- [ALTER TABLE](#alter-table) +- [Helper Functions](#helper-functions) + * [fx](#fx) + * [schema](#schema) * [More examples](#more-examples) - [Tests](#tests) @@ -77,19 +77,22 @@ import {QueryBuilder, fx, expr, schema} from 'clickhouse-query'; ### Quick start +ClickHouse Query is using `@clickhouse/client` package to perform queries to ClickHouse. + ```ts -import {ClickHouse} from 'clickhouse'; +import {createClient} from '@clickhouse/client' import winston from 'winston'; import {QueryBuilder} from 'clickhouse-query'; -const clickhouse = new ClickHouse({ - url: 'http://localhost', - port: 8123, - basicAuth: {username: 'user', password: 'password'}, - format: 'json', - raw: false, +// Configuration documentation - https://clickhouse.com/docs/en/integrations/language-clients/javascript#configuration +const clickhouse = createClient({ + host: 'http://localhost:8123', + username: 'user', + password: 'password', + database: 'my_database', + application: 'my_app' }); -const logger = winston.createLogger(); // not required, you can pass as null +const logger = winston.createLogger(); // not required, you can pass null if no logging required const builder = new QueryBuilder(clickhouse, logger); const users = await builder.query() .select('email') @@ -112,214 +115,363 @@ const users = await builder.query() // Executes: SELECT email FROM users ``` -## 
CREATE TABLE +## SELECT -Creating tables as simple as this: +Builder has special method called `query()` which allows you to build SELECT queries. Below you may find a couple of +examples. -```ts -import {schema} from 'clickhouse-query'; +Select single column: -await builder.createTable() - .table('table_name') - .column('column1', schema.string()) - .engine('Memory') +```ts +await builder.query() + .select('id') + .from('users') .execute(); -// Executes: CREATE TABLE table_name(column1 String) ENGINE = Memory +// Executes: SELECT id FROM users ``` -Also, you can provide multiple columns to create: +Select multiple columns: ```ts -import {schema} from 'clickhouse-query'; - -await builder.createTable() - .table('table_name') - .column('column1', schema.string()) - .column('column_date', schema.dateTime()) - .engine('Memory') +await builder.query() + .select(['id', 'email']) + .from('users') .execute(); -// Executes: CREATE TABLE table_name(column1 String, column_date DateTime) ENGINE = Memory +// Executes: SELECT id, email FROM users ``` -Create table with `ORDER BY`: +Select from sub-query: ```ts -import {schema} from 'clickhouse-query'; - -await builder.createTable() - .table('table_name') - .column('column1', schema.string()) - .column('column_date', schema.dateTime()) - .engine('MergeTree()') - .orderBy(['column1', 'column_date']) +await builder.query() + .select(['ip']) + .from( + builder.query() + .select('ip') + .from('metrics') + ) .execute(); -// Executes: CREATE TABLE table_name(column1 String, column_date DateTime) ENGINE = MergeTree() ORDER BY (column1, column_date) +// Executes: SELECT ip FROM (SELECT ip FROM metrics) ``` -Create table with `IF NOT EXISTS`: +Select with alias: ```ts -import {schema} from 'clickhouse-query'; - -await builder.createTable() - .table('table_name') - .ifNotExists() - .column('column1', schema.string()) - .engine('Memory') +await builder.query() + .select(['ip']) + .from( + builder.query() + .select('ip') + .from('metrics') + 
) + .as('m') .execute(); -// Executes: CREATE TABLE IF NOT EXISTS table_name(column1 String) ENGINE = Memory +// Executes: (SELECT ip FROM metrics) AS m ``` -## ALTER TABLE +### FINAL -Builder has special method called `alterTable()` to handle ALTER TABLE queries. Below you may find a couple of examples. +Generates SQL query with `FINAL` keyword. -Add column: +Useful for tables which +use [ReplacingMergeTree](https://clickhouse.com/docs/en/engines/table-engines/mergetree-family/replacingmergetree) +engine to get rid-off duplicate entries. ```ts -import {schema} from 'clickhouse-query'; -import {AddColumn} from 'clickhouse-query/AlterTable/AddColumn'; - -await builder.alterTable() - .table('table_name') - .addColumn((new AddColumn()).name('column1').type(schema.string())) +await builder.query() + .select(['id', 'email']) + .final() + .from('users') + .where('id', '=', 1) .execute(); -// Executes: ALTER TABLE table_name ADD COLUMN column1 String +// Executes: SELECT id, email FROM users FINAL WHERE id = 1 ``` -Drop column: - -```ts -import {DropColumn} from 'clickhouse-query/AlterTable/DropColumn'; - -await builder.alterTable() - .table('table_name') - .dropColumn((new DropColumn()).name('column1')) - .execute(); -// Executes: ALTER TABLE table_name DROP COLUMN column1 -``` +### FROM -Rename column: +Select with table alias: ```ts -import {RenameColumn} from 'clickhouse-query/AlterTable/RenameColumn'; - -await builder.alterTable() - .table('table_name') - .renameColumn((new RenameColumn()).from('column1').to('column2')) +await builder.query() + .select('id') + .from('users', 'u') .execute(); -// Executes: ALTER TABLE table_name RENAME COLUMN column1 TO column2 -``` +// Executes: SELECT id FROM users u +```` -Modify column: +### WHERE -```ts -import {schema} from 'clickhouse-query'; -import {ModifyColumn} from 'clickhouse-query/AlterTable/ModifyColumn'; +The following operators are supported: -await builder.alterTable() - .table('table_name') - .modifyColumn((new 
ModifyColumn()).modify().name('column1').type(schema.string())) - .execute(); -// Executes: ALTER TABLE table_name MODIFY COLUMN column1 String -``` +- `=` +- `<` +- `>` +- `>=` +- `<=` +- `!=` +- `BETWEEN` +- `IN` +- `NOT IN` +- `LIKE` +- `NOT LIKE` +- `IS NULL` +- `IS NOT NULL` -Modify column with `AFTER`: +Simple condition: ```ts -import {schema} from 'clickhouse-query'; -import {ModifyColumn} from 'clickhouse-query/AlterTable/ModifyColumn'; - -await builder.alterTable() - .table('table_name') - .modifyColumn((new ModifyColumn()).modify().name('column1').type(schema.string()).after('column2')) - .after('column2') +await builder.query() + .select(['email']) + .from('users') + .where('status', '=', 10) .execute(); -// Executes: ALTER TABLE table_name MODIFY COLUMN column1 String AFTER column2 +// Executes: SELECT email FROM users WHERE status = 10 ``` -Modify column with `FIRST`: +Where with AND condition: ```ts -import {schema} from 'clickhouse-query'; -import {ModifyColumn} from 'clickhouse-query/AlterTable/ModifyColumn'; - -await builder.alterTable() - .table('table_name') - .modifyColumn((new ModifyColumn()).modify().name('column1').type(schema.string()).first()) - .first() +await builder.query() + .select(['email']) + .from('users') + .where('status', '>', 10) + .andWhere('email', '=', 'john.doe@example.com') + .execute(); -// Executes: ALTER TABLE table_name MODIFY COLUMN column1 String FIRST +// Executes: SELECT email FROM users WHERE status > 10 AND email = 'john.doe@example.com' ``` -## INSERT - -Builder has special method called `insert()` to handle INSERT queries. Below you may find a couple of examples. 
- -Insert single row: +Numeric `BETWEEN` condition: ```ts -await builder.insert() - .into('metrics') - .columns(['id', 'ip', 'created_date']) - .values({id: 1, ip: '127.0.0.1', created_date: '2022-12-20'}) +await builder.query() + .select(['email']) + .from('users') + .where('id', 'BETWEEN', [1, 100]) .execute(); -// Executes: INSERT INTO metrics (id, ip, created_date) VALUES (1, '127.0.0.1', '2022-12-20') +// Executes: SELECT email FROM users WHERE id BETWEEN 1 AND 100 ``` -Definition of `columns()` is optional, you can use `values()` without it. `values()` will use the first row to determine -the columns. +Date `BETWEEN` condition: ```ts -await builder.insert() - .into('metrics') - .values({id: 1, ip: '127.0.0.1', created_date: '2022-12-20'}) +await builder.query() + .select(['email']) + .from('users') + .where('created_date', 'BETWEEN', ['2022-01-01', '2022-01-31']) .execute(); -// Executes: INSERT INTO metrics (id, ip, created_date) VALUES (1, '127.0.0.1', '2022-12-20') +// Executes: SELECT email FROM users WHERE created_date BETWEEN '2022-01-01' AND '2022-01-31' ``` -You can chain multiple rows using `values()`: +`IN`/`NOT IN` condition: ```ts -await builder.insert() - .into('metrics') - .columns(['id', 'ip', 'created_date']) - .values({id: 1, ip: '127.0.0.1', created_date: '2022-12-20'}) - .values({id: 2, ip: '127.0.0.2', created_date: '2022-12-21'}) +await builder.query() + .select(['email']) + .from('users') + .where('id', 'IN', [1, 2, 3]) .execute(); -// Executes: INSERT INTO metrics (id, ip, created_date) VALUES (1, '127.0.0.1', '2022-12-20'), (2, '127.0.0.2', '2022-12-21') +// Executes: SELECT email FROM users WHERE id IN (1, 2, 3) ``` -You can write bulk rows (same as above): +Sub-query example: ```ts -await builder.insert() - .into('metrics') - .columns(['id', 'ip', 'created_date']) - .values([ - {id: 1, ip: '127.0.0.1', created_date: '2022-12-20'}, - {id: 2, ip: '127.0.0.2', created_date: '2022-12-21'} - ]) +await builder.query() + 
.select(['email']) + .from('users') + .where('id', 'IN', builder.query().select(['id']).from('test2')) + .execute(); -// Executes: INSERT INTO metrics (id, ip, created_date) VALUES (1, '127.0.0.1', '2022-12-20'), (2, '127.0.0.2', '2022-12-21') +// Executes: SELECT email FROM users WHERE id IN (SELECT id FROM test2) ``` -## DELETE - -Builder has special method called `delete()` to handle DELETE queries. Below you may find a couple of examples. +`LIKE`/`NOT LIKE` condition: ```ts -await builder.delete() - .table('metrics') - .where('created_date', '>', '2022-12-20') +await builder.query() + .select(['email']) + .from('users') + .where('email', 'LIKE', '%@gmail.com') .execute(); -// Executes: ALTER TABLE metrics DELETE WHERE created_date > '2022-12-20' +// Executes: SELECT email FROM users WHERE email LIKE '%@gmail.com' ``` -If you want to delete everything from table use it as following: +### HAVING ```ts -await builder.delete() +await builder.query() + .select([ + 'repo_name', + fx.sum(expr("event_type = 'ForkEvent'")).as('forks'), + fx.sum(expr("event_type = 'WatchEvent'")).as('stars'), + fx.round(expr('stars / forks'), 2).as('ratio'), + ]) + .from('github_events') + .where('event_type', 'IN', ['ForkEvent', 'WatchEvent']) + .groupBy(['repo_name']) + .orderBy([['ratio', 'DESC']]) + .having('stars', '>', 100) + .andHaving('forks', '>', 100) + .limit(50) + .execute(); +// Executes: SELECT repo_name, sum(event_type = 'ForkEvent') AS forks, sum(event_type = 'WatchEvent') AS stars, round(stars / forks, 2) AS ratio FROM github_events WHERE event_type IN ('ForkEvent', 'WatchEvent') GROUP BY repo_name HAVING stars > 100 AND forks > 100 ORDER BY ratio DESC LIMIT 50 +``` + +### JOIN + +By default, if you provide `JOIN`, `INNER JOIN` would be used. + +You may chain multiple joins if needed. 
+ +```ts +await builder.query() + .select(['id', 'first_name']) + .from('users', 'u') + .join( + 'INNER JOIN', + getQuery() + .select(['user_id']) + .from('posts') + .where('id', '>', 1), + 'p', + 'p.user_id = u.user_id' + ) + .execute(); +// Executes: SELECT id, first_name FROM users AS u INNER JOIN (SELECT user_id FROM posts WHERE id > 1) AS p ON p.user_id = u.user_id +``` + +### LIMIT/OFFSET + +```ts +await builder.query() + .select(['id', 'first_name']) + .from('users') + .limit(10) + .offset(0) + .generateSql(); +// Executes: SELECT id, first_name FROM users OFFSET 0 ROW FETCH FIRST 10 ROWS ONLY +``` + +### WITH + +```ts +await builder.query() + .with([ + expr("toStartOfDay(toDate('2021-01-01'))").as('start'), + expr("toStartOfDay(toDate('2021-01-02'))").as('end'), + ]) + .select([ + fx.arrayJoin( + expr('arrayMap(x -> toDateTime(x), range(toUInt32(start), toUInt32(end), 3600))') + ) + ]) + .execute(); +// Executes: WITH toStartOfDay(toDate('2021-01-01')) AS start, toStartOfDay(toDate('2021-01-02')) AS end SELECT arrayJoin(arrayMap(x -> toDateTime(x), range(toUInt32(start), toUInt32(end), 3600))) +``` + +Using constant expression as "variable": + +```ts +import {expr} from 'clickhouse-query'; + +await builder.query() + .with('2019-08-01 15:23:00', 'ts_upper_bound') + .select('*') + .from('hits') + .where('EventDate', '=', expr('toDate(ts_upper_bound)')) + .andWhere('EventTime', '<', expr('ts_upper_bound')) + .execute(); +// Executes: WITH '2019-08-01 15:23:00' AS ts_upper_bound SELECT * FROM hits WHERE EventDate = toDate(ts_upper_bound) AND EventTime < ts_upper_bound +``` + +Using results of a scalar subquery: + +```ts +import {fx, expr} from 'clickhouse-query'; + +await builder.query() + .with([ + q2.select([fx.sum(expr('bytes'))]) + .from('system.parts') + .where('active', '=', 1) + .as('total_disk_usage') + ]) + .select([expr('(sum(bytes) / total_disk_usage) * 100 AS table_disk_usage'), expr('table')]) + .from('system.parts') + .groupBy(['table']) + 
.orderBy([['table_disk_usage', 'DESC']]) + .limit(10) + .execute(); +// Executes: WITH (SELECT sum(bytes) FROM system.parts WHERE active = 1) AS total_disk_usage SELECT (sum(bytes) / total_disk_usage) * 100 AS table_disk_usage, table FROM system.parts GROUP BY table ORDER BY table_disk_usage DESC LIMIT 10 +``` + +## INSERT + +Builder has special method called `insert()` to handle INSERT queries. Below you may find a couple of examples. + +Insert single row: + +```ts +await builder.insert() + .into('metrics') + .columns(['id', 'ip', 'created_date']) + .values({id: 1, ip: '127.0.0.1', created_date: '2022-12-20'}) + .execute(); +// Executes: INSERT INTO metrics (id, ip, created_date) VALUES (1, '127.0.0.1', '2022-12-20') +``` + +Definition of `columns()` is optional, you can use `values()` without it. `values()` will use the first row to determine +the columns. + +```ts +await builder.insert() + .into('metrics') + .values({id: 1, ip: '127.0.0.1', created_date: '2022-12-20'}) + .execute(); +// Executes: INSERT INTO metrics (id, ip, created_date) VALUES (1, '127.0.0.1', '2022-12-20') +``` + +You can chain multiple rows using `values()`: + +```ts +await builder.insert() + .into('metrics') + .columns(['id', 'ip', 'created_date']) + .values({id: 1, ip: '127.0.0.1', created_date: '2022-12-20'}) + .values({id: 2, ip: '127.0.0.2', created_date: '2022-12-21'}) + .execute(); +// Executes: INSERT INTO metrics (id, ip, created_date) VALUES (1, '127.0.0.1', '2022-12-20'), (2, '127.0.0.2', '2022-12-21') +``` + +You can write bulk rows (same as above): + +```ts +await builder.insert() + .into('metrics') + .columns(['id', 'ip', 'created_date']) + .values([ + {id: 1, ip: '127.0.0.1', created_date: '2022-12-20'}, + {id: 2, ip: '127.0.0.2', created_date: '2022-12-21'} + ]) + .execute(); +// Executes: INSERT INTO metrics (id, ip, created_date) VALUES (1, '127.0.0.1', '2022-12-20'), (2, '127.0.0.2', '2022-12-21') +``` + +## DELETE + +Builder has special method called `delete()` to handle 
DELETE queries. Below you may find a couple of examples. + +```ts +await builder.delete() + .table('metrics') + .where('created_date', '>', '2022-12-20') + .execute(); +// Executes: ALTER TABLE metrics DELETE WHERE created_date > '2022-12-20' +``` + +If you want to delete everything from table use it as following: + +```ts +await builder.delete() .table('metrics') .all() .execute(); @@ -390,298 +542,149 @@ await builder.update() // Executes: ALTER TABLE metrics UPDATE ips = ['127.0.0.1', '127.0.0.2'] WHERE id = 1 ``` -## SELECT - -Builder has special method called `query()` which allows you to build SELECT queries. Below you may find a couple of -examples. - -Select single column: - -```ts -await builder.query() - .select('id') - .from('users') - .execute(); -// Executes: SELECT id FROM users -``` +## CREATE TABLE -Select multiple columns: +Creating tables as simple as this: ```ts -await builder.query() - .select(['id', 'email']) - .from('users') - .execute(); -// Executes: SELECT id, email FROM users -``` - -Select from sub-query: +import {schema} from 'clickhouse-query'; -```ts -await builder.query() - .select(['ip']) - .from( - builder.query() - .select('ip') - .from('metrics') - ) +await builder.createTable() + .table('table_name') + .column('column1', schema.string()) + .engine('Memory') .execute(); -// Executes: SELECT ip FROM (SELECT ip FROM metrics) +// Executes: CREATE TABLE table_name(column1 String) ENGINE = Memory ``` -Select with alias: +Also, you can provide multiple columns to create: ```ts -await builder.query() - .select(['ip']) - .from( - builder.query() - .select('ip') - .from('metrics') - ) - .as('m') - .execute(); -// Executes: (SELECT ip FROM metrics) AS m -``` - -### FINAL - -Generates SQL query with `FINAL` keyword. - -Useful for tables which -use [ReplacingMergeTree](https://clickhouse.com/docs/en/engines/table-engines/mergetree-family/replacingmergetree) -engine to get rid-off duplicate entries. 
+import {schema} from 'clickhouse-query'; -```ts -await builder.query() - .select(['id', 'email']) - .final() - .from('users') - .where('id', '=', 1) +await builder.createTable() + .table('table_name') + .column('column1', schema.string()) + .column('column_date', schema.dateTime()) + .engine('Memory') .execute(); -// Executes: SELECT id, email FROM users FINAL WHERE id = 1 +// Executes: CREATE TABLE table_name(column1 String, column_date DateTime) ENGINE = Memory ``` -### FROM - -Select with table alias: +Create table with `ORDER BY`: ```ts -await builder.query() - .select('id') - .from('users', 'u') - .execute(); -// Executes: SELECT id FROM users u -```` - -### WHERE - -The following operators are supported: - -- `=` -- `<` -- `>` -- `>=` -- `<=` -- `!=` -- `BETWEEN` -- `IN` -- `NOT IN` -- `LIKE` -- `NOT LIKE` -- `IS NULL` -- `IS NOT NULL` - -Simple condition: +import {schema} from 'clickhouse-query'; -```ts -await builder.query() - .select(['email']) - .from('users') - .where('status', '=', 10) +await builder.createTable() + .table('table_name') + .column('column1', schema.string()) + .column('column_date', schema.dateTime()) + .engine('MergeTree()') + .orderBy(['column1', 'column_date']) .execute(); -// Executes: SELECT email FROM users WHERE status = 10 +// Executes: CREATE TABLE table_name(column1 String, column_date DateTime) ENGINE = MergeTree() ORDER BY (column1, column_date) ``` -Where with AND condition: +Create table with `IF NOT EXISTS`: ```ts -await builder.query() - .select(['email']) - .from('users') - .where('status', '>', 10) - .andWhere('email', '=', 'john.doe@example.com') - .execute(); -// Executes: SELECT email FROM users WHERE status > 10 AND email LIKE 'john.doe@example.com' -``` - -Numeric `BETWEEN` condition: +import {schema} from 'clickhouse-query'; -```ts -await builder.query() - .select(['email']) - .from('users') - .where('id', 'BETWEEN', [1, 100]) +await builder.createTable() + .table('table_name') + .ifNotExists() + 
.column('column1', schema.string()) + .engine('Memory') .execute(); -// Executes: SELECT email FROM users WHERE created_date BETWEEN 1 AND 100 +// Executes: CREATE TABLE IF NOT EXISTS table_name(column1 String) ENGINE = Memory ``` -Date `BETWEEN` condition: +## ALTER TABLE -```ts -await builder.query() - .select(['email']) - .from('users') - .where('created_date', 'BETWEEN', ['2022-01-01', '2022-01-31']) - .execute(); -// Executes: SELECT email FROM users WHERE created_date BETWEEN '2022-01-01' AND '2022-01-31' -``` +Builder has special method called `alterTable()` to handle ALTER TABLE queries. Below you may find a couple of examples. -`IN`/`NOT IN` condition: +Add column: ```ts -await builder.query() - .select(['email']) - .from('users') - .where('id', 'IN', [1, 2, 3]) - .execute(); -// Executes: SELECT email FROM users WHERE id IN (1, 2, 3) -``` - -Sub-query example: +import {schema} from 'clickhouse-query'; +import {AddColumn} from 'clickhouse-query/AlterTable/AddColumn'; -```ts -await builder.query() - .select(['email']) - .from('users') - .where('id', 'IN', builder.query().select(['id']).from('test2')) +await builder.alterTable() + .table('table_name') + .addColumn((new AddColumn()).name('column1').type(schema.string())) .execute(); -// Executes: SELECT email FROM users WHERE id IN (SELECT id FROM test2) +// Executes: ALTER TABLE table_name ADD COLUMN column1 String ``` -`LIKE`/`NOT LIKE` condition: +Drop column: ```ts -await builder.query() - .select(['email']) - .from('users') - .where('email', 'LIKE', '%@gmail.com') - .execute(); -// Executes: SELECT email FROM users WHERE email LIKE '%@gmail.com' -``` - -### HAVING +import {DropColumn} from 'clickhouse-query/AlterTable/DropColumn'; -```ts -await await builder - .select([ - 'repo_name', - fx.sum(expr("event_type = 'ForkEvent'")).as('forks'), - fx.sum(expr("event_type = 'WatchEvent'")).as('stars'), - fx.round(expr('stars / forks'), 2).as('ratio'), - ]) - .from('github_events') - .where('event_type', 'IN', 
['ForkEvent', 'WatchEvent']) - .groupBy(['repo_name']) - .orderBy([['ratio', 'DESC']]) - .having('stars', '>', 100) - .andHaving('forks', '>', 100) - .limit(50) +await builder.alterTable() + .table('table_name') + .dropColumn((new DropColumn()).name('column1')) .execute(); -// Executes: SELECT repo_name, sum(event_type = 'ForkEvent') AS forks, sum(event_type = 'WatchEvent') AS stars, round(stars / forks, 2) AS ratio FROM github_events WHERE event_type IN ('ForkEvent', 'WatchEvent') GROUP BY repo_name HAVING stars > 100 AND forks > 100 ORDER BY ratio DESC LIMIT 50 +// Executes: ALTER TABLE table_name DROP COLUMN column1 ``` -### JOIN - -By default, if you provide `JOIN`, `INNER JOIN` would be used. - -You may chain as multiple joins if needed. +Rename column: ```ts -await builder.query() - .select(['id', 'first_name']) - .from('users', 'u') - .join( - 'INNER JOIN', - getQuery() - .select(['user_id']) - .from('posts') - .where('id', '>', 1), - 'p', - 'p.user_id = u.user_id' - ) +import {RenameColumn} from 'clickhouse-query/AlterTable/RenameColumn'; + +await builder.alterTable() + .table('table_name') + .renameColumn((new RenameColumn()).from('column1').to('column2')) .execute(); -// Executes: SELECT id, first_name FROM users AS u INNER JOIN (SELECT user_id FROM posts WHERE id > 1) AS p ON p.user_id = u.user_id +// Executes: ALTER TABLE table_name RENAME COLUMN column1 TO column2 ``` -### LIMIT/OFFSET +Modify column: ```ts -await builder.query() - .select(['id', 'first_name']) - .from('users') - .limit(10) - .offset(0) - .generateSql(); -// Executes: SELECT id, first_name FROM users OFFSET 0 ROW FETCH FIRST 10 ROWS ONLY -``` - -### WITH +import {schema} from 'clickhouse-query'; +import {ModifyColumn} from 'clickhouse-query/AlterTable/ModifyColumn'; -```ts -await builder.query() - .with([ - expr("toStartOfDay(toDate('2021-01-01'))").as('start'), - expr("toStartOfDay(toDate('2021-01-02'))").as('end'), - ]) - .select([ - fx.arrayJoin( - expr('arrayMap(x -> toDateTime(x), 
range(toUInt32(start), toUInt32(end), 3600))') - ) - ]) +await builder.alterTable() + .table('table_name') + .modifyColumn((new ModifyColumn()).modify().name('column1').type(schema.string())) .execute(); -// Executes: WITH toStartOfDay(toDate('2021-01-01')) AS start, toStartOfDay(toDate('2021-01-02')) AS end SELECT arrayJoin(arrayMap(x -> toDateTime(x), range(toUInt32(start), toUInt32(end), 3600))) +// Executes: ALTER TABLE table_name MODIFY COLUMN column1 String ``` -Using constant expression as "variable": +Modify column with `AFTER`: ```ts -import {expr} from 'clickhouse-query'; +import {schema} from 'clickhouse-query'; +import {ModifyColumn} from 'clickhouse-query/AlterTable/ModifyColumn'; -await builder.query() - .with('2019-08-01 15:23:00', 'ts_upper_bound') - .select('*') - .from('hits') - .where('EventDate', '=', expr('toDate(ts_upper_bound)')) - .andWhere('EventTime', '<', expr('ts_upper_bound')) +await builder.alterTable() + .table('table_name') + .modifyColumn((new ModifyColumn()).modify().name('column1').type(schema.string()).after('column2')) + .after('column2') .execute(); -// Executes: WITH '2019-08-01 15:23:00' AS ts_upper_bound SELECT * FROM hits WHERE EventDate = toDate(ts_upper_bound) AND EventTime < ts_upper_bound +// Executes: ALTER TABLE table_name MODIFY COLUMN column1 String AFTER column2 ``` -Using results of a scalar subquery: +Modify column with `FIRST`: ```ts -import {fx, expr} from 'clickhouse-query'; +import {schema} from 'clickhouse-query'; +import {ModifyColumn} from 'clickhouse-query/AlterTable/ModifyColumn'; -await builder.query() - .with([ - q2.select([fx.sum(expr('bytes'))]) - .from('system.parts') - .where('active', '=', 1) - .as('total_disk_usage') - ]) - .select([expr('(sum(bytes) / total_disk_usage) * 100 AS table_disk_usage'), expr('table')]) - .from('system.parts') - .groupBy(['table']) - .orderBy([['table_disk_usage', 'DESC']]) - .limit(10) +await builder.alterTable() + .table('table_name') + .modifyColumn((new 
ModifyColumn()).modify().name('column1').type(schema.string()).first()) + .first() .execute(); -// Executes: WITH (SELECT sum(bytes) FROM system.parts WHERE active = 1) AS total_disk_usage SELECT (sum(bytes) / total_disk_usage) * 100 AS table_disk_usage, table FROM system.parts GROUP BY table ORDER BY table_disk_usage DESC LIMIT 10 +// Executes: ALTER TABLE table_name MODIFY COLUMN column1 String FIRST ``` -### Helper Functions +## Helper Functions -#### fx +### fx Use `fx` helper to access ClickHouse functions. @@ -730,7 +733,7 @@ List of available helpers: - `positionCaseInsensitive` - `translateUTF8` -#### schema +### schema Use `schema` helper to access ClickHouse data types. This helper can be used when creating tables or altering changes. @@ -745,7 +748,7 @@ await builder.createTable() // Executes: CREATE TABLE table_name(column1 String) ENGINE = Memory ``` -### More examples +## More Code Examples For further query examples you can check `__tests__` folder. @@ -759,5 +762,13 @@ Run tests: yarn tests ``` +## Scripts + +- `yarn build` - build project +- `yarn test` - run tests +- `yarn toc` - auto-generate table of contents + + + diff --git a/__tests__/AlterTable/AlterTableQuery.test.ts b/__tests__/AlterTable/AlterTableQuery.test.ts index 25a2a0b..f0dff63 100644 --- a/__tests__/AlterTable/AlterTableQuery.test.ts +++ b/__tests__/AlterTable/AlterTableQuery.test.ts @@ -2,7 +2,7 @@ import {describe, expect, it} from '@jest/globals'; import {AddColumn} from '../../src/AlterTable/AddColumn'; import {schema} from '../../src'; import {AlterTableQuery} from '../../src/AlterTable/AlterTableQuery'; -import {ClickHouse} from 'clickhouse'; +import {createClient} from '@clickhouse/client' import winston from 'winston'; import {DropColumn} from '../../src/AlterTable/DropColumn'; import {RenameColumn} from '../../src/AlterTable/RenameColumn'; @@ -14,7 +14,7 @@ import {ModifyColumn} from '../../src/AlterTable/ModifyColumn'; jest.mock('winston'); // @ts-ignore 
-jest.mock('clickhouse'); +jest.mock('@clickhouse/client'); function createLogger() { return winston.createLogger({ @@ -23,7 +23,7 @@ function createLogger() { } function getAlterTableQuery(): AlterTableQuery { - return new AlterTableQuery(new ClickHouse({}), createLogger()); + return new AlterTableQuery(createClient(), createLogger()); } describe('AlterTableQuery', () => { diff --git a/__tests__/CreateTableQuery.test.ts b/__tests__/CreateTableQuery.test.ts index ab0e0a0..adf0f98 100644 --- a/__tests__/CreateTableQuery.test.ts +++ b/__tests__/CreateTableQuery.test.ts @@ -1,13 +1,14 @@ import {describe, expect, it} from '@jest/globals'; -import {ClickHouse} from 'clickhouse'; +import {createClient} from '@clickhouse/client' import winston from 'winston'; import {CreateTableQuery} from '../src/internal'; +import {schema} from '../src'; // @ts-ignore jest.mock('winston'); // @ts-ignore -jest.mock('clickhouse'); +jest.mock('@clickhouse/client'); function createLogger() { return winston.createLogger({ @@ -16,7 +17,7 @@ function createLogger() { } function getCreateTableQuery(): CreateTableQuery { - return new CreateTableQuery(new ClickHouse({}), createLogger()); + return new CreateTableQuery(createClient(), createLogger()); } describe('CreateTableQuery', () => { @@ -24,7 +25,7 @@ describe('CreateTableQuery', () => { const createTable = getCreateTableQuery(); const sql = createTable .table('table_name') - .column('column1', createTable.string()) + .column('column1', schema.string()) .engine('Memory') .generateSql(); @@ -35,8 +36,8 @@ describe('CreateTableQuery', () => { const createTable = getCreateTableQuery(); const sql = createTable .table('table_name') - .column('column1', createTable.string()) - .column('column_date', createTable.dateTime()) + .column('column1', schema.string()) + .column('column_date', schema.dateTime()) .engine('Memory') .generateSql(); @@ -47,8 +48,8 @@ describe('CreateTableQuery', () => { const createTable = getCreateTableQuery(); const sql = 
createTable .table('table_name') - .column('column1', createTable.string()) - .column('column_date', createTable.dateTime()) + .column('column1', schema.string()) + .column('column_date', schema.dateTime()) .engine('MergeTree()') .orderBy(['column1', 'column_date']) .generateSql(); @@ -61,7 +62,7 @@ describe('CreateTableQuery', () => { const sql = createTable .table('table_name') .ifNotExists() - .column('column1', createTable.string()) + .column('column1', schema.string()) .engine('Memory') .generateSql(); @@ -73,7 +74,7 @@ describe('CreateTableQuery', () => { const sql = createTable .table('table_name') .onCluster('my_cluster') - .column('column1', createTable.string()) + .column('column1', schema.string()) .engine('Memory') .generateSql(); @@ -87,7 +88,7 @@ describe('CreateTableQuery', () => { const sql = createTable .table('table_name') .onCluster('my_cluster') - .column('column1', createTable.nullable(createTable.string())) + .column('column1', schema.nullable(schema.string())) .engine('Memory') .generateSql(); @@ -99,7 +100,7 @@ describe('CreateTableQuery', () => { const sql = createTable .table('table_name') .onCluster('my_cluster') - .column('column1', createTable.int8()) + .column('column1', schema.int8()) .engine('Memory') .generateSql(); @@ -111,7 +112,7 @@ describe('CreateTableQuery', () => { const sql = createTable .table('table_name') .onCluster('my_cluster') - .column('column1', createTable.int16()) + .column('column1', schema.int16()) .engine('Memory') .generateSql(); @@ -123,7 +124,7 @@ describe('CreateTableQuery', () => { const sql = createTable .table('table_name') .onCluster('my_cluster') - .column('column1', createTable.int32()) + .column('column1', schema.int32()) .engine('Memory') .generateSql(); @@ -135,7 +136,7 @@ describe('CreateTableQuery', () => { const sql = createTable .table('table_name') .onCluster('my_cluster') - .column('column1', createTable.int64()) + .column('column1', schema.int64()) .engine('Memory') .generateSql(); @@ -147,7 
+148,7 @@ describe('CreateTableQuery', () => { const sql = createTable .table('table_name') .onCluster('my_cluster') - .column('column1', createTable.int128()) + .column('column1', schema.int128()) .engine('Memory') .generateSql(); @@ -159,7 +160,7 @@ describe('CreateTableQuery', () => { const sql = createTable .table('table_name') .onCluster('my_cluster') - .column('column1', createTable.int256()) + .column('column1', schema.int256()) .engine('Memory') .generateSql(); @@ -175,7 +176,7 @@ describe('CreateTableQuery', () => { const sql = createTable .table('table_name') .onCluster('my_cluster') - .column('column1', createTable.uInt8()) + .column('column1', schema.uInt8()) .engine('Memory') .generateSql(); @@ -187,7 +188,7 @@ describe('CreateTableQuery', () => { const sql = createTable .table('table_name') .onCluster('my_cluster') - .column('column1', createTable.uInt16()) + .column('column1', schema.uInt16()) .engine('Memory') .generateSql(); @@ -199,7 +200,7 @@ describe('CreateTableQuery', () => { const sql = createTable .table('table_name') .onCluster('my_cluster') - .column('column1', createTable.uInt32()) + .column('column1', schema.uInt32()) .engine('Memory') .generateSql(); @@ -211,7 +212,7 @@ describe('CreateTableQuery', () => { const sql = createTable .table('table_name') .onCluster('my_cluster') - .column('column1', createTable.uInt64()) + .column('column1', schema.uInt64()) .engine('Memory') .generateSql(); @@ -223,7 +224,7 @@ describe('CreateTableQuery', () => { const sql = createTable .table('table_name') .onCluster('my_cluster') - .column('column1', createTable.uInt128()) + .column('column1', schema.uInt128()) .engine('Memory') .generateSql(); @@ -235,7 +236,7 @@ describe('CreateTableQuery', () => { const sql = createTable .table('table_name') .onCluster('my_cluster') - .column('column1', createTable.uInt256()) + .column('column1', schema.uInt256()) .engine('Memory') .generateSql(); @@ -247,7 +248,7 @@ describe('CreateTableQuery', () => { const sql = 
createTable .table('table_name') .onCluster('my_cluster') - .column('column1', createTable.float32()) + .column('column1', schema.float32()) .engine('Memory') .generateSql(); @@ -259,7 +260,7 @@ describe('CreateTableQuery', () => { const sql = createTable .table('table_name') .onCluster('my_cluster') - .column('column1', createTable.float64()) + .column('column1', schema.float64()) .engine('Memory') .generateSql(); @@ -271,7 +272,7 @@ describe('CreateTableQuery', () => { const sql = createTable .table('table_name') .onCluster('my_cluster') - .column('column1', createTable.decimal(10, 2)) + .column('column1', schema.decimal(10, 2)) .engine('Memory') .generateSql(); @@ -283,7 +284,7 @@ describe('CreateTableQuery', () => { const sql = createTable .table('table_name') .onCluster('my_cluster') - .column('column1', createTable.decimal32(10, 2)) + .column('column1', schema.decimal32(10, 2)) .engine('Memory') .generateSql(); @@ -295,7 +296,7 @@ describe('CreateTableQuery', () => { const sql = createTable .table('table_name') .onCluster('my_cluster') - .column('column1', createTable.decimal64(10, 2)) + .column('column1', schema.decimal64(10, 2)) .engine('Memory') .generateSql(); @@ -307,7 +308,7 @@ describe('CreateTableQuery', () => { const sql = createTable .table('table_name') .onCluster('my_cluster') - .column('column1', createTable.decimal128(10, 2)) + .column('column1', schema.decimal128(10, 2)) .engine('Memory') .generateSql(); @@ -319,7 +320,7 @@ describe('CreateTableQuery', () => { const sql = createTable .table('table_name') .onCluster('my_cluster') - .column('column1', createTable.decimal256(10, 2)) + .column('column1', schema.decimal256(10, 2)) .engine('Memory') .generateSql(); @@ -331,7 +332,7 @@ describe('CreateTableQuery', () => { const sql = createTable .table('table_name') .onCluster('my_cluster') - .column('column1', createTable.boolean()) + .column('column1', schema.boolean()) .engine('Memory') .generateSql(); @@ -343,7 +344,7 @@ 
describe('CreateTableQuery', () => { const sql = createTable .table('table_name') .onCluster('my_cluster') - .column('column1', createTable.string()) + .column('column1', schema.string()) .engine('Memory') .generateSql(); @@ -355,7 +356,7 @@ describe('CreateTableQuery', () => { const sql = createTable .table('table_name') .onCluster('my_cluster') - .column('column1', createTable.fixedString(5)) + .column('column1', schema.fixedString(5)) .engine('Memory') .generateSql(); @@ -367,7 +368,7 @@ describe('CreateTableQuery', () => { const sql = createTable .table('table_name') .onCluster('my_cluster') - .column('column1', createTable.uuid()) + .column('column1', schema.uuid()) .engine('Memory') .generateSql(); @@ -379,7 +380,7 @@ describe('CreateTableQuery', () => { const sql = createTable .table('table_name') .onCluster('my_cluster') - .column('column1', createTable.date()) + .column('column1', schema.date()) .engine('Memory') .generateSql(); @@ -391,7 +392,7 @@ describe('CreateTableQuery', () => { const sql = createTable .table('table_name') .onCluster('my_cluster') - .column('column1', createTable.date32()) + .column('column1', schema.date32()) .engine('Memory') .generateSql(); @@ -403,7 +404,7 @@ describe('CreateTableQuery', () => { const sql = createTable .table('table_name') .onCluster('my_cluster') - .column('column1', createTable.dateTime()) + .column('column1', schema.dateTime()) .engine('Memory') .generateSql(); @@ -415,7 +416,7 @@ describe('CreateTableQuery', () => { const sql = createTable .table('table_name') .onCluster('my_cluster') - .column('column1', createTable.dateTime()) + .column('column1', schema.dateTime()) .engine('Memory') .generateSql(); @@ -427,7 +428,7 @@ describe('CreateTableQuery', () => { const sql = createTable .table('table_name') .onCluster('my_cluster') - .column('column1', createTable.dateTime('Asia/Istanbul')) + .column('column1', schema.dateTime('Asia/Istanbul')) .engine('Memory') .generateSql(); @@ -442,7 +443,7 @@ 
describe('CreateTableQuery', () => { const sql = createTable .table('table_name') .onCluster('my_cluster') - .column('column1', createTable.dateTime64(3)) + .column('column1', schema.dateTime64(3)) .engine('Memory') .generateSql(); @@ -454,7 +455,7 @@ describe('CreateTableQuery', () => { const sql = createTable .table('table_name') .onCluster('my_cluster') - .column('column1', createTable.dateTime64(3,'Asia/Istanbul')) + .column('column1', schema.dateTime64(3,'Asia/Istanbul')) .engine('Memory') .generateSql(); @@ -467,7 +468,7 @@ describe('CreateTableQuery', () => { const sql = createTable .table('table_name') .onCluster('my_cluster') - .column('column1', createTable.enum({hello: 1, world: 2})) + .column('column1', schema.enum({hello: 1, world: 2})) .engine('Memory') .generateSql(); @@ -479,7 +480,7 @@ describe('CreateTableQuery', () => { const sql = createTable .table('table_name') .onCluster('my_cluster') - .column('column1', createTable.enum(['hello', 'world'])) + .column('column1', schema.enum(['hello', 'world'])) .engine('Memory') .generateSql(); @@ -491,7 +492,7 @@ describe('CreateTableQuery', () => { const sql = createTable .table('table_name') .onCluster('my_cluster') - .column('column1', createTable.enum8({hello: 1, world: 2})) + .column('column1', schema.enum8({hello: 1, world: 2})) .engine('Memory') .generateSql(); @@ -503,7 +504,7 @@ describe('CreateTableQuery', () => { const sql = createTable .table('table_name') .onCluster('my_cluster') - .column('column1', createTable.enum8(['hello', 'world'])) + .column('column1', schema.enum8(['hello', 'world'])) .engine('Memory') .generateSql(); @@ -515,7 +516,7 @@ describe('CreateTableQuery', () => { const sql = createTable .table('table_name') .onCluster('my_cluster') - .column('column1', createTable.enum16({hello: 1, world: 2})) + .column('column1', schema.enum16({hello: 1, world: 2})) .engine('Memory') .generateSql(); @@ -527,7 +528,7 @@ describe('CreateTableQuery', () => { const sql = createTable 
.table('table_name') .onCluster('my_cluster') - .column('column1', createTable.enum16(['hello', 'world'])) + .column('column1', schema.enum16(['hello', 'world'])) .engine('Memory') .generateSql(); @@ -539,7 +540,7 @@ describe('CreateTableQuery', () => { const sql = createTable .table('table_name') .onCluster('my_cluster') - .column('column1', createTable.lowCardinality(createTable.string())) + .column('column1', schema.lowCardinality(schema.string())) .engine('Memory') .generateSql(); @@ -551,7 +552,7 @@ describe('CreateTableQuery', () => { const sql = createTable .table('table_name') .onCluster('my_cluster') - .column('column1', createTable.array(createTable.string())) + .column('column1', schema.array(schema.string())) .engine('Memory') .generateSql(); @@ -563,7 +564,7 @@ describe('CreateTableQuery', () => { const sql = createTable .table('table_name') .onCluster('my_cluster') - .column('column1', createTable.json()) + .column('column1', schema.json()) .engine('Memory') .generateSql(); @@ -575,7 +576,7 @@ describe('CreateTableQuery', () => { const sql = createTable .table('table_name') .onCluster('my_cluster') - .column('column1', createTable.tuple([ + .column('column1', schema.tuple([ ['s', 'String'], ['i', 'Int64'], ])) @@ -593,30 +594,30 @@ describe('CreateTableQuery', () => { const createTable = getCreateTableQuery(); const sql = createTable .table('uk_price_paid') - .column('price', createTable.uInt32()) - .column('date', createTable.date()) - .column('postcode1', createTable.lowCardinality(createTable.string())) - .column('postcode2', createTable.lowCardinality(createTable.string())) - .column('type', createTable.enum8({ + .column('price', schema.uInt32()) + .column('date', schema.date()) + .column('postcode1', schema.lowCardinality(schema.string())) + .column('postcode2', schema.lowCardinality(schema.string())) + .column('type', schema.enum8({ terraced: 1, 'semi-detached': 2, detached: 3, flat: 4, other: 0 })) - .column('is_new', createTable.uInt8()) - 
.column('duration', createTable.enum8({ + .column('is_new', schema.uInt8()) + .column('duration', schema.enum8({ freehold: 1, leasehold: 2, unknown: 0 })) - .column('addr1', createTable.string()) - .column('addr2', createTable.string()) - .column('street', createTable.lowCardinality(createTable.string())) - .column('locality', createTable.lowCardinality(createTable.string())) - .column('town', createTable.lowCardinality(createTable.string())) - .column('district', createTable.lowCardinality(createTable.string())) - .column('county', createTable.lowCardinality(createTable.string())) + .column('addr1', schema.string()) + .column('addr2', schema.string()) + .column('street', schema.lowCardinality(schema.string())) + .column('locality', schema.lowCardinality(schema.string())) + .column('town', schema.lowCardinality(schema.string())) + .column('district', schema.lowCardinality(schema.string())) + .column('county', schema.lowCardinality(schema.string())) .engine('MergeTree') .orderBy(['postcode1', 'postcode2', 'addr1', 'addr2']) .generateSql(); diff --git a/__tests__/DeleteQuery.test.ts b/__tests__/DeleteQuery.test.ts index 7d3a3fc..ae047ff 100644 --- a/__tests__/DeleteQuery.test.ts +++ b/__tests__/DeleteQuery.test.ts @@ -1,5 +1,5 @@ import {describe, expect, it} from '@jest/globals'; -import {ClickHouse} from 'clickhouse'; +import {createClient} from '@clickhouse/client' import winston from 'winston'; import {DeleteQuery} from '../src/internal'; @@ -7,7 +7,7 @@ import {DeleteQuery} from '../src/internal'; jest.mock('winston'); // @ts-ignore -jest.mock('clickhouse'); +jest.mock('@clickhouse/client'); function createLogger() { return winston.createLogger({ @@ -16,7 +16,7 @@ function createLogger() { } function getDeleteQuery(): DeleteQuery { - return new DeleteQuery(new ClickHouse({}), createLogger()); + return new DeleteQuery(createClient(), createLogger()); } describe('DeleteQuery', () => { diff --git a/__tests__/FilterableQuery.test.ts 
b/__tests__/FilterableQuery.test.ts index aeabd9c..2879e84 100644 --- a/__tests__/FilterableQuery.test.ts +++ b/__tests__/FilterableQuery.test.ts @@ -1,6 +1,5 @@ import {describe, expect, it} from '@jest/globals'; - -import {ClickHouse} from 'clickhouse'; +import {createClient} from '@clickhouse/client' import winston from 'winston'; import {FilterableQuery, Query} from '../src/internal'; @@ -8,7 +7,7 @@ import {FilterableQuery, Query} from '../src/internal'; jest.mock('winston'); // @ts-ignore -jest.mock('clickhouse'); +jest.mock('@clickhouse/client'); function createLogger() { return winston.createLogger({ @@ -17,7 +16,7 @@ function createLogger() { } function getQuery(): Query { - return new Query(new ClickHouse({}), createLogger()); + return new Query(createClient(), createLogger()); } describe('FilterableQuery', () => { diff --git a/__tests__/InsertQuery.test.ts b/__tests__/InsertQuery.test.ts index 5af338f..2f80997 100644 --- a/__tests__/InsertQuery.test.ts +++ b/__tests__/InsertQuery.test.ts @@ -1,5 +1,5 @@ import {describe, expect, it} from '@jest/globals'; -import {ClickHouse} from 'clickhouse'; +import {createClient} from '@clickhouse/client' import winston from 'winston'; import {InsertQuery, Query} from '../src/internal'; @@ -7,7 +7,7 @@ import {InsertQuery, Query} from '../src/internal'; jest.mock('winston'); // @ts-ignore -jest.mock('clickhouse'); +jest.mock('@clickhouse/client'); function createLogger() { return winston.createLogger({ @@ -16,11 +16,11 @@ function createLogger() { } function getInsertQuery(): InsertQuery { - return new InsertQuery(new ClickHouse({}), createLogger()); + return new InsertQuery(createClient(), createLogger()); } function getQuery(): Query { - return new Query(new ClickHouse({}), createLogger()); + return new Query(createClient(), createLogger()); } describe('InsertQuery', () => { diff --git a/__tests__/Query.test.ts b/__tests__/Query.test.ts index 023de0e..cb5240c 100644 --- a/__tests__/Query.test.ts +++ 
b/__tests__/Query.test.ts @@ -1,5 +1,5 @@ import {describe, expect, it} from '@jest/globals'; -import {ClickHouse} from 'clickhouse'; +import {createClient} from '@clickhouse/client' import winston from 'winston'; import {fx, expr} from '../src'; import {Query} from '../src/internal'; @@ -8,7 +8,7 @@ import {Query} from '../src/internal'; jest.mock('winston'); // @ts-ignore -jest.mock('clickhouse'); +jest.mock('@clickhouse/client'); function createLogger() { return winston.createLogger({ @@ -17,7 +17,7 @@ function createLogger() { } function getQuery(): Query { - return new Query(new ClickHouse({}), createLogger()); + return new Query(createClient({}), createLogger()); } describe('Query', () => { diff --git a/__tests__/UpdateQuery.test.ts b/__tests__/UpdateQuery.test.ts index 0ea1c2a..c0f81dc 100644 --- a/__tests__/UpdateQuery.test.ts +++ b/__tests__/UpdateQuery.test.ts @@ -1,5 +1,5 @@ import {describe, expect, it} from '@jest/globals'; -import {ClickHouse} from 'clickhouse'; +import {createClient} from '@clickhouse/client' import winston from 'winston'; import {UpdateQuery} from '../src/internal'; @@ -7,7 +7,7 @@ import {UpdateQuery} from '../src/internal'; jest.mock('winston'); // @ts-ignore -jest.mock('clickhouse'); +jest.mock('@clickhouse/client'); function createLogger() { return winston.createLogger({ @@ -16,7 +16,7 @@ function createLogger() { } function getUpdateQuery(): UpdateQuery { - return new UpdateQuery(new ClickHouse({}), createLogger()); + return new UpdateQuery(createClient(), createLogger()); } describe('UpdateQuery', () => { diff --git a/package.json b/package.json index ef7dc51..03dac90 100644 --- a/package.json +++ b/package.json @@ -1,7 +1,7 @@ { "name": "clickhouse-query", - "version": "1.10.0", - "description": "ClickHouse Query is intuitive query builder to overcome the complexity of ClickHouse SQL syntax.", + "version": "1.10.1", + "description": "ClickHouse Query is intuitive query builder for ClickHouse SQL syntax.", "main": 
"dist/src/index.js", "types": "dist/src/index.d.ts", "repository": { @@ -27,7 +27,7 @@ "toc": "./node_modules/.bin/markdown-toc README.md -i " }, "dependencies": { - "clickhouse": "^2.6.0", + "@clickhouse/client": "^0.2.2", "winston": "^3.8.2" }, "devDependencies": { diff --git a/src/AlterTable/AlterTableQuery.ts b/src/AlterTable/AlterTableQuery.ts index ce8a51e..8af5ceb 100644 --- a/src/AlterTable/AlterTableQuery.ts +++ b/src/AlterTable/AlterTableQuery.ts @@ -1,4 +1,3 @@ -import {ClickHouse} from 'clickhouse'; import {Logger} from 'winston'; import {FilterableQuery} from '../internal'; import {AddColumn} from './AddColumn'; @@ -7,9 +6,11 @@ import {RenameColumn} from './RenameColumn'; import {ClearColumn} from './ClearColumn'; import {CommentColumn} from './CommentColumn'; import {ModifyColumn} from './ModifyColumn'; +import {ClickHouseClient} from '@clickhouse/client'; +import Stream from 'stream'; export class AlterTableQuery extends FilterableQuery { - private readonly connection: ClickHouse; + private readonly connection: ClickHouseClient; private readonly logger: Logger | null; private tablePart: string | null = null; @@ -22,7 +23,7 @@ export class AlterTableQuery extends FilterableQuery { private commentColumnPart: CommentColumn | null = null; private modifyColumnPart: ModifyColumn | null = null; - constructor(ch: ClickHouse, logger: Logger | null = null) { + constructor(ch: ClickHouseClient, logger: Logger | null = null) { super(); this.connection = ch; this.logger = logger; @@ -106,11 +107,14 @@ export class AlterTableQuery extends FilterableQuery { return sql; } - public async execute() { + public async execute() { const sql = this.generateSql(); if (this.logger !== null) { this.logger.info('ClickHouse query SQL: ' + sql); } - return await (this.connection.query(sql).toPromise() as Promise); + + await this.connection.command({ + query: sql, + }); } } diff --git a/src/CreateTableQuery.ts b/src/CreateTableQuery.ts index 198601f..f116c19 100644 --- 
a/src/CreateTableQuery.ts +++ b/src/CreateTableQuery.ts @@ -1,9 +1,10 @@ -import {ClickHouse} from 'clickhouse'; import {Logger} from 'winston'; import {schema} from './index'; +import {ClickHouseClient} from '@clickhouse/client'; +import Stream from 'stream'; export class CreateTableQuery { - private readonly connection: ClickHouse; + private readonly connection: ClickHouseClient; private readonly logger: Logger | null; private tablePart: string | null = null; @@ -18,7 +19,7 @@ export class CreateTableQuery { private orderByPart: Array = []; - constructor(ch: ClickHouse, logger: Logger | null) { + constructor(ch: ClickHouseClient, logger: Logger | null) { this.connection = ch; this.logger = logger; } @@ -116,12 +117,14 @@ export class CreateTableQuery { return sql; } - public async execute() { + public async execute() { const sql = this.generateSql(); if (this.logger !== null) { this.logger.info('ClickHouse query SQL: ' + sql); } - return await (this.connection.query(sql).toPromise() as Promise); + await this.connection.command({ + query: sql, + }) } /** diff --git a/src/DeleteQuery.ts b/src/DeleteQuery.ts index 028c638..6be7bd5 100644 --- a/src/DeleteQuery.ts +++ b/src/DeleteQuery.ts @@ -1,15 +1,16 @@ -import {ClickHouse} from 'clickhouse'; +import {ClickHouseClient} from '@clickhouse/client'; import {Logger} from 'winston'; import {FilterableQuery} from './internal'; import {expr} from './index'; +import Stream from 'stream'; export class DeleteQuery extends FilterableQuery { - private readonly connection: ClickHouse; + private readonly connection: ClickHouseClient; private readonly logger: Logger | null; private tablePart: string | null = null; - constructor(ch: ClickHouse, logger: Logger | null) { + constructor(ch: ClickHouseClient, logger: Logger | null) { super(); this.connection = ch; this.logger = logger; @@ -35,11 +36,11 @@ export class DeleteQuery extends FilterableQuery { return `ALTER TABLE ${this.tablePart} DELETE ${this.generateWhere()}`; } - public 
async execute() { + public async execute() { const sql = this.generateSql(); if (this.logger !== null) { this.logger.info('ClickHouse query SQL: ' + sql); } - return await (this.connection.query(sql).toPromise() as Promise); + await this.connection.command({query: sql}); } } diff --git a/src/InsertQuery.ts b/src/InsertQuery.ts index 36ecf4d..210bede 100644 --- a/src/InsertQuery.ts +++ b/src/InsertQuery.ts @@ -1,22 +1,19 @@ -import {ClickHouse} from 'clickhouse'; +import {ClickHouseClient} from '@clickhouse/client'; import {Logger} from 'winston'; import {Query} from './Query'; - -type FileFormats = 'none' +import Stream from 'stream'; type Values = Array | Query; export class InsertQuery { - private readonly connection: ClickHouse; + private readonly connection: ClickHouseClient; private readonly logger: Logger | null; private intoPart: string | null = null; private columnsPart: string[] | null = null; private valuesPart: Values | null = null; - private valuesFromFile: [string, string] | null = null; - - constructor(ch: ClickHouse, logger: Logger | null) { + constructor(ch: ClickHouseClient, logger: Logger | null) { this.connection = ch; this.logger = logger; } @@ -103,11 +100,13 @@ export class InsertQuery { } } - public async execute() { + public async execute() { const sql = this.generateSql(); if (this.logger !== null) { this.logger.info('ClickHouse query SQL: ' + sql); } - return await (this.connection.query(sql).toPromise() as Promise); + await this.connection.command({ + query: sql, + }); } } diff --git a/src/Query.ts b/src/Query.ts index fbebeb5..8fa14b1 100644 --- a/src/Query.ts +++ b/src/Query.ts @@ -1,6 +1,7 @@ -import {ClickHouse} from 'clickhouse'; +import {ClickHouseClient} from '@clickhouse/client'; import {Logger} from 'winston'; import {FilterableQuery, Operator, WherePart, WhereValueCondition, Expression} from './internal'; +import Stream from 'stream'; type Selectable = Array | string; type SelectParams = Selectable | '*'; @@ -25,7 +26,7 @@ type 
JoinOperator = | 'LEFT ASOF JOIN'; export class Query extends FilterableQuery { - private readonly connection: ClickHouse; + private readonly connection: ClickHouseClient; private readonly logger: Logger | null; private withPart: [Selectable | Query | string | null, string | null] = [null, null]; private selectPart: SelectParams = '*'; @@ -39,7 +40,7 @@ export class Query extends FilterableQuery { private joinPart: Array<[JoinOperator, Query | string, string, string]> = []; private aliasPart: [string, 'first' | 'last'] | null = null; - constructor(ch: ClickHouse, logger: Logger | null) { + constructor(ch: ClickHouseClient, logger: Logger | null) { super(); this.connection = ch; this.logger = logger; @@ -259,7 +260,12 @@ export class Query extends FilterableQuery { this.logger.info('ClickHouse query template: ' + sql); this.logger.info('ClickHouse query SQL: ' + this.replaceParamsWithValues(sql, params)); } - return await (this.connection.query(sql, {params}).toPromise() as Promise); + const res = await this.connection.query({ + query: sql, + query_params: params, + format: 'JSONEachRow' + }); + return res.json(); } private replaceParamsWithValues(sql: string, params: Record) { diff --git a/src/QueryBuilder.ts b/src/QueryBuilder.ts index 96673b1..4375f50 100644 --- a/src/QueryBuilder.ts +++ b/src/QueryBuilder.ts @@ -1,4 +1,3 @@ -import {ClickHouse} from 'clickhouse'; import {Query} from './Query'; import {Logger} from 'winston'; import {InsertQuery} from './InsertQuery'; @@ -6,13 +5,14 @@ import {DeleteQuery} from './DeleteQuery'; import {UpdateQuery} from './UpdateQuery'; import {CreateTableQuery} from './CreateTableQuery'; import {AlterTableQuery} from './AlterTable/AlterTableQuery'; - +import {ClickHouseClient} from '@clickhouse/client'; +import Stream from 'stream'; export class QueryBuilder { - private readonly connection: ClickHouse; + private readonly connection: ClickHouseClient; private readonly logger: Logger | null; - constructor(ch: ClickHouse, logger: 
Logger | null = null) { + constructor(ch: ClickHouseClient, logger: Logger | null = null) { this.connection = ch; this.logger = logger; } diff --git a/src/UpdateQuery.ts b/src/UpdateQuery.ts index 8ae8947..4daa32a 100644 --- a/src/UpdateQuery.ts +++ b/src/UpdateQuery.ts @@ -1,17 +1,18 @@ -import {ClickHouse} from 'clickhouse'; +import {ClickHouseClient} from '@clickhouse/client'; import {Logger} from 'winston'; import {FilterableQuery} from './FilterableQuery'; import {Expression} from './Expression'; +import Stream from 'stream'; export class UpdateQuery extends FilterableQuery { - private readonly connection: ClickHouse; + private readonly connection: ClickHouseClient; private readonly logger: Logger | null; private tablePart: string | null = null; private valuesPart: Array<[string, unknown]> = []; - constructor(ch: ClickHouse, logger: Logger | null) { + constructor(ch: ClickHouseClient, logger: Logger | null) { super(); this.connection = ch; this.logger = logger; @@ -57,12 +58,15 @@ export class UpdateQuery extends FilterableQuery { return sql; } - public async execute() { + public async execute() { const sql = this.generateSql(); if (this.logger !== null) { this.logger.info('ClickHouse query SQL: ' + sql); } - return await (this.connection.query(sql).toPromise() as Promise); + + await this.connection.command({ + query: sql, + }); } private encodeValue(value: unknown): string | number { diff --git a/src/internal.ts b/src/internal.ts index 2295e4f..cac9b1d 100644 --- a/src/internal.ts +++ b/src/internal.ts @@ -8,3 +8,4 @@ export * from './InsertQuery'; export * from './UpdateQuery'; export * from './DeleteQuery'; export * from './CreateTableQuery'; +export * from './SchemaType'; diff --git a/yarn.lock b/yarn.lock index 710db60..41f355d 100644 --- a/yarn.lock +++ b/yarn.lock @@ -297,6 +297,18 @@ resolved "https://registry.yarnpkg.com/@bcoe/v8-coverage/-/v8-coverage-0.2.3.tgz#75a2e8b51cb758a7553d6804a5932d7aace75c39" integrity 
sha512-0hYQ8SB4Db5zvZB4axdMHGwEaQjkZzFjQiN9LVYvIFB2nSUHW9tYpxWriPrWDASIxiaXax83REcLxuSdnGPZtw== +"@clickhouse/client-common@0.2.2": + version "0.2.2" + resolved "https://registry.yarnpkg.com/@clickhouse/client-common/-/client-common-0.2.2.tgz#0690046241140a51ba5b0c0b9298c3cb3cf20974" + integrity sha512-jlom9zLfcDzX9E3off93ZD3CPOkClyM213Y7TN1datkuRGKMvVyj1k0KXaMekhbRev+FTe85CqfoD5eq6qOnPg== + +"@clickhouse/client@^0.2.2": + version "0.2.2" + resolved "https://registry.yarnpkg.com/@clickhouse/client/-/client-0.2.2.tgz#a6358aa2342ee3f2850cdb2f47a9e1d6fbde5757" + integrity sha512-2faBnDS4x7ZkcOZqi3f6H967kH+nOfJLhBTWWjz0wTSBnEJBXRtePhN/ZY0NJIKc9Ga5w41Pf67mQgm6Dm/1/w== + dependencies: + "@clickhouse/client-common" "0.2.2" + "@colors/colors@1.5.0": version "1.5.0" resolved "https://registry.yarnpkg.com/@colors/colors/-/colors-1.5.0.tgz#bb504579c1cae923e6576a4f5da43d25f97bdbd9" @@ -707,14 +719,6 @@ dependencies: "@types/yargs-parser" "*" -JSONStream@1.3.4: - version "1.3.4" - resolved "https://registry.yarnpkg.com/JSONStream/-/JSONStream-1.3.4.tgz#615bb2adb0cd34c8f4c447b5f6512fa1d8f16a2e" - integrity sha512-Y7vfi3I5oMOYIr+WxV8NZxDSwcbNgzdKYsTNInmycOq9bUYwGg9ryu57Wg5NLmCjqdFPNUmpMBo3kSJN9tCbXg== - dependencies: - jsonparse "^1.2.0" - through ">=2.2.7 <3" - acorn-walk@^8.1.1: version "8.2.0" resolved "https://registry.yarnpkg.com/acorn-walk/-/acorn-walk-8.2.0.tgz#741210f2e2426454508853a2f44d0ab83b7f69c1" @@ -725,16 +729,6 @@ acorn@^8.4.1: resolved "https://registry.yarnpkg.com/acorn/-/acorn-8.8.1.tgz#0a3f9cbecc4ec3bea6f0a80b66ae8dd2da250b73" integrity sha512-7zFpHzhnqYKrkYdUjF1HI1bzd0VygEGX8lFk4k5zVMqHEoES+P+7TKI+EvLO9WVMJ8eekdO0aDEK044xTXwPPA== -ajv@^6.12.3: - version "6.12.6" - resolved "https://registry.yarnpkg.com/ajv/-/ajv-6.12.6.tgz#baf5a62e802b07d977034586f8c3baf5adf26df4" - integrity sha512-j3fVLgvTo527anyYyJOGTYJbG+vnnQYvE0m5mmkc1TK+nxAppkCLMIL0aZ4dblVCNoGShhm+kzE4ZUykBoMg4g== - dependencies: - fast-deep-equal "^3.1.1" - fast-json-stable-stringify "^2.0.0" - 
json-schema-traverse "^0.4.1" - uri-js "^4.2.2" - ansi-escapes@^4.2.1: version "4.3.2" resolved "https://registry.yarnpkg.com/ansi-escapes/-/ansi-escapes-4.3.2.tgz#6b2291d1db7d98b6521d5f1efa42d0f3a9feb65e" @@ -798,28 +792,11 @@ argparse@^1.0.10, argparse@^1.0.7: dependencies: sprintf-js "~1.0.2" -asn1@~0.2.3: - version "0.2.6" - resolved "https://registry.yarnpkg.com/asn1/-/asn1-0.2.6.tgz#0d3a7bb6e64e02a90c0303b31f292868ea09a08d" - integrity sha512-ix/FxPn0MDjeyJ7i/yoHGFt/EX6LyNbxSEhPPXODPL+KB0VPk86UYfL0lMdy+KCnv+fmvIzySwaK5COwqVbWTQ== - dependencies: - safer-buffer "~2.1.0" - -assert-plus@1.0.0, assert-plus@^1.0.0: - version "1.0.0" - resolved "https://registry.yarnpkg.com/assert-plus/-/assert-plus-1.0.0.tgz#f12e0f3c5d77b0b1cdd9146942e4e96c1e4dd525" - integrity sha512-NfJ4UzBCcQGLDlQq7nHxH+tv3kyZ0hHQqF5BO6J7tNJeP5do1llPr8dZ8zHonfhAu0PHAdMkSo+8o0wxg9lZWw== - async@^3.2.3: version "3.2.4" resolved "https://registry.yarnpkg.com/async/-/async-3.2.4.tgz#2d22e00f8cddeb5fde5dd33522b56d1cf569a81c" integrity sha512-iAB+JbDEGXhyIUavoDl9WP/Jj106Kz9DEn1DPgYw5ruDn0e3Wgi3sKFm55sASdGBNOQB8F59d9qQ7deqrHA8wQ== -asynckit@^0.4.0: - version "0.4.0" - resolved "https://registry.yarnpkg.com/asynckit/-/asynckit-0.4.0.tgz#c79ed97f7f34cb8f2ba1bc9790bcc366474b4b79" - integrity sha512-Oei9OH4tRh0YqU3GxhX79dM/mwVgvbZJaSNaRk+bshkj0S5cfHcgYakreBjrHwatXKbz+IoIdYLxrKim2MjW0Q== - autolinker@~0.28.0: version "0.28.1" resolved "https://registry.yarnpkg.com/autolinker/-/autolinker-0.28.1.tgz#0652b491881879f0775dace0cdca3233942a4e47" @@ -827,16 +804,6 @@ autolinker@~0.28.0: dependencies: gulp-header "^1.7.1" -aws-sign2@~0.7.0: - version "0.7.0" - resolved "https://registry.yarnpkg.com/aws-sign2/-/aws-sign2-0.7.0.tgz#b46e890934a9591f2d2f6f86d7e6a9f1b3fe76a8" - integrity sha512-08kcGqnYf/YmjoRhfxyu+CLxBjUtHLXLXX/vUfx9l2LYzG3c1m61nrpyFUZI6zeS+Li/wWMMidD9KgrqtGq3mA== - -aws4@^1.8.0: - version "1.11.0" - resolved 
"https://registry.yarnpkg.com/aws4/-/aws4-1.11.0.tgz#d61f46d83b2519250e2784daf5b09479a8b41c59" - integrity sha512-xh1Rl34h6Fi1DC2WWKfxUTVqRsNnr6LsKz2+hfwDxQJWmrx8+c7ylaqBMcHfl1U1r2dsifOvKX3LQuLNZ+XSvA== - babel-jest@^29.3.1: version "29.3.1" resolved "https://registry.yarnpkg.com/babel-jest/-/babel-jest-29.3.1.tgz#05c83e0d128cd48c453eea851482a38782249f44" @@ -902,13 +869,6 @@ balanced-match@^1.0.0: resolved "https://registry.yarnpkg.com/balanced-match/-/balanced-match-1.0.2.tgz#e83e3a7e3f300b34cb9d87f615fa0cbf357690ee" integrity sha512-3oSeUO0TMV67hN1AmbXsK4yaqU7tjiHlbxRDZOpH0KW9+CeX4bRAaX0Anxt0tx2MrpRpWwQaPwIlISEJhYU5Pw== -bcrypt-pbkdf@^1.0.0: - version "1.0.2" - resolved "https://registry.yarnpkg.com/bcrypt-pbkdf/-/bcrypt-pbkdf-1.0.2.tgz#a4301d389b6a43f9b67ff3ca11a3f6637e360e9e" - integrity sha512-qeFIXtP4MSoi6NLqO12WfqARWWuCKi2Rn/9hJLEmtB5yTNr9DqFWkJRCf2qShWzPeAMRnOgCrq0sg/KLv5ES9w== - dependencies: - tweetnacl "^0.14.3" - brace-expansion@^1.1.7: version "1.1.11" resolved "https://registry.yarnpkg.com/brace-expansion/-/brace-expansion-1.1.11.tgz#3c7fcbf529d87226f3d2f52b966ff5271eb441dd" @@ -973,11 +933,6 @@ caniuse-lite@^1.0.30001400: resolved "https://registry.yarnpkg.com/caniuse-lite/-/caniuse-lite-1.0.30001434.tgz#ec1ec1cfb0a93a34a0600d37903853030520a4e5" integrity sha512-aOBHrLmTQw//WFa2rcF1If9fa3ypkC1wzqqiKHgfdrXTWcU8C4gKVZT77eQAPWN1APys3+uQ0Df07rKauXGEYA== -caseless@~0.12.0: - version "0.12.0" - resolved "https://registry.yarnpkg.com/caseless/-/caseless-0.12.0.tgz#1b681c21ff84033c826543090689420d187151dc" - integrity sha512-4tYFyifaFfGacoiObjJegolkwSU4xQNGbVgUiNYVUxbQ2x2lUsFvY4hVgVzGiIe6WLOPqycWXA40l+PWsxthUw== - chalk@^2.0.0: version "2.4.2" resolved "https://registry.yarnpkg.com/chalk/-/chalk-2.4.2.tgz#cd42541677a54333cf541a49108c1432b44c9424" @@ -1010,20 +965,6 @@ cjs-module-lexer@^1.0.0: resolved "https://registry.yarnpkg.com/cjs-module-lexer/-/cjs-module-lexer-1.2.2.tgz#9f84ba3244a512f3a54e5277e8eef4c489864e40" integrity 
sha512-cOU9usZw8/dXIXKtwa8pM0OTJQuJkxMN6w30csNRUerHfeQ5R6U3kkU/FtJeIf3M202OHfY2U8ccInBG7/xogA== -clickhouse@^2.6.0: - version "2.6.0" - resolved "https://registry.yarnpkg.com/clickhouse/-/clickhouse-2.6.0.tgz#6218e3978fd5f343bdff395d988c5cd44e8b9a03" - integrity sha512-HC5OV99GJOup4qZsTuWWPpXlj+847Z0OeygDU2x22rNYost0V/vWapzFWYZdV/5iRbGMrhFQPOyQEzmGvoaWRQ== - dependencies: - JSONStream "1.3.4" - lodash "4.17.21" - querystring "0.2.0" - request "2.88.0" - stream2asynciter "1.0.3" - through "2.3.8" - tsv "0.2.0" - uuid "3.4.0" - cliui@^8.0.1: version "8.0.1" resolved "https://registry.yarnpkg.com/cliui/-/cliui-8.0.1.tgz#0c04b075db02cbfe60dc8e6cf2f5486b1a3608aa" @@ -1096,13 +1037,6 @@ colorspace@1.1.x: color "^3.1.3" text-hex "1.0.x" -combined-stream@^1.0.6, combined-stream@~1.0.6: - version "1.0.8" - resolved "https://registry.yarnpkg.com/combined-stream/-/combined-stream-1.0.8.tgz#c3d45a8b34fd730631a110a8a2520682b31d5a7f" - integrity sha512-FQN4MRfuJeHf7cBbBMJFXhKSDq+2kAArBlmRBvcvFE5BB1HZKXtSFASDhdlz9zOYwxh8lDdnvmMOe/+5cdoEdg== - dependencies: - delayed-stream "~1.0.0" - concat-map@0.0.1: version "0.0.1" resolved "https://registry.yarnpkg.com/concat-map/-/concat-map-0.0.1.tgz#d8a96bd77fd68df7793a73036a3ba0d5405d477b" @@ -1135,11 +1069,6 @@ convert-source-map@^2.0.0: resolved "https://registry.yarnpkg.com/convert-source-map/-/convert-source-map-2.0.0.tgz#4b560f649fc4e918dd0ab75cf4961e8bc882d82a" integrity sha512-Kvp459HrV2FEJ1CAsi1Ku+MY3kasH19TFykTz2xWmMeq6bk2NU3XXvfJ+Q61m0xktWwt+1HSYf3JZsTms3aRJg== -core-util-is@1.0.2: - version "1.0.2" - resolved "https://registry.yarnpkg.com/core-util-is/-/core-util-is-1.0.2.tgz#b5fd54220aa2bc5ab57aab7140c940754503c1a7" - integrity sha512-3lqz5YjWTYnW6dlDa5TLaTCcShfar1e40rmcJVwCBJC6mWlFuj0eCHIElmG1g5kyuJ/GD+8Wn4FFCcz4gJPfaQ== - core-util-is@~1.0.0: version "1.0.3" resolved "https://registry.yarnpkg.com/core-util-is/-/core-util-is-1.0.3.tgz#a6042d3634c2b27e9328f837b965fac83808db85" @@ -1159,13 +1088,6 @@ cross-spawn@^7.0.3: 
shebang-command "^2.0.0" which "^2.0.1" -dashdash@^1.12.0: - version "1.14.1" - resolved "https://registry.yarnpkg.com/dashdash/-/dashdash-1.14.1.tgz#853cfa0f7cbe2fed5de20326b8dd581035f6e2f0" - integrity sha512-jRFi8UDGo6j+odZiEpjazZaWqEal3w/basFjQHQEwVtZJGDpxbH1MeYluwCS8Xq5wmLJooDlMgvVarmWfGM44g== - dependencies: - assert-plus "^1.0.0" - debug@^4.1.0, debug@^4.1.1: version "4.3.4" resolved "https://registry.yarnpkg.com/debug/-/debug-4.3.4.tgz#1319f6579357f2338d3337d2cdd4914bb5dcc865" @@ -1183,11 +1105,6 @@ deepmerge@^4.2.2: resolved "https://registry.yarnpkg.com/deepmerge/-/deepmerge-4.2.2.tgz#44d2ea3679b8f4d4ffba33f03d865fc1e7bf4955" integrity sha512-FJ3UgI4gIl+PHZm53knsuSFpE+nESMr7M4v9QcgB7S63Kj/6WqMiFQJpBBYz1Pt+66bZpP3Q7Lye0Oo9MPKEdg== -delayed-stream@~1.0.0: - version "1.0.0" - resolved "https://registry.yarnpkg.com/delayed-stream/-/delayed-stream-1.0.0.tgz#df3ae199acadfb7d440aaae0b29e2272b24ec619" - integrity sha512-ZySD7Nf91aLB0RxL4KGrKHBXl7Eds1DAmEdcoVawXnLD7SDhpNgtuII2aAkg7a7QS41jxPSZ17p4VdGnMHk3MQ== - detect-newline@^3.0.0: version "3.1.0" resolved "https://registry.yarnpkg.com/detect-newline/-/detect-newline-3.1.0.tgz#576f5dfc63ae1a192ff192d8ad3af6308991b651" @@ -1208,14 +1125,6 @@ diff@^4.0.1: resolved "https://registry.yarnpkg.com/diff/-/diff-4.0.2.tgz#60f3aecb89d5fae520c11aa19efc2bb982aade7d" integrity sha512-58lmxKSA4BNyLz+HHMUzlOEpg09FV+ev6ZMe3vJihgdxzgcwZ8VoEEPmALCZG9LmqfVoNMMKpttIYTVG6uDY7A== -ecc-jsbn@~0.1.1: - version "0.1.2" - resolved "https://registry.yarnpkg.com/ecc-jsbn/-/ecc-jsbn-0.1.2.tgz#3a83a904e54353287874c564b7549386849a98c9" - integrity sha512-eh9O+hwRHNbG4BLTjEl3nw044CkGm5X6LoaCf7LPp7UU8Qrt47JYNi6nPX8xjW97TKGKm1ouctg0QSpZe9qrnw== - dependencies: - jsbn "~0.1.0" - safer-buffer "^2.1.0" - electron-to-chromium@^1.4.251: version "1.4.284" resolved "https://registry.yarnpkg.com/electron-to-chromium/-/electron-to-chromium-1.4.284.tgz#61046d1e4cab3a25238f6bf7413795270f125592" @@ -1308,27 +1217,7 @@ extend-shallow@^2.0.1: dependencies: 
is-extendable "^0.1.0" -extend@~3.0.2: - version "3.0.2" - resolved "https://registry.yarnpkg.com/extend/-/extend-3.0.2.tgz#f8b1136b4071fbd8eb140aff858b1019ec2915fa" - integrity sha512-fjquC59cD7CyW6urNXK0FBufkZcoiGG80wTuPujX590cB5Ttln20E2UB4S/WARVqhXffZl2LNgS+gQdPIIim/g== - -extsprintf@1.3.0: - version "1.3.0" - resolved "https://registry.yarnpkg.com/extsprintf/-/extsprintf-1.3.0.tgz#96918440e3041a7a414f8c52e3c574eb3c3e1e05" - integrity sha512-11Ndz7Nv+mvAC1j0ktTa7fAb0vLyGGX+rMHNBYQviQDGU0Hw7lhctJANqbPhu9nV9/izT/IntTgZ7Im/9LJs9g== - -extsprintf@^1.2.0: - version "1.4.1" - resolved "https://registry.yarnpkg.com/extsprintf/-/extsprintf-1.4.1.tgz#8d172c064867f235c0c84a596806d279bf4bcc07" - integrity sha512-Wrk35e8ydCKDj/ArClo1VrPVmN8zph5V4AtHwIuHhvMXsKf73UT3BOD+azBIW+3wOJ4FhEH7zyaJCFvChjYvMA== - -fast-deep-equal@^3.1.1: - version "3.1.3" - resolved "https://registry.yarnpkg.com/fast-deep-equal/-/fast-deep-equal-3.1.3.tgz#3a7d56b559d6cbc3eb512325244e619a65c6c525" - integrity sha512-f3qQ9oQy9j2AhBe/H9VC91wLmKBCCU/gDOnKNAYG5hswO7BLKj09Hc5HYNz9cGI++xlpDCIgDaitVs03ATR84Q== - -fast-json-stable-stringify@2.x, fast-json-stable-stringify@^2.0.0, fast-json-stable-stringify@^2.1.0: +fast-json-stable-stringify@2.x, fast-json-stable-stringify@^2.1.0: version "2.1.0" resolved "https://registry.yarnpkg.com/fast-json-stable-stringify/-/fast-json-stable-stringify-2.1.0.tgz#874bf69c6f404c2b5d99c481341399fd55892633" integrity sha512-lhd/wF+Lk98HZoTCtlVraHtfh5XYijIjalXck7saUtuanSDyLMxnHhSXEDJqHxD7msR8D0uCmqlkwjCV8xvwHw== @@ -1381,20 +1270,6 @@ for-in@^1.0.2: resolved "https://registry.yarnpkg.com/for-in/-/for-in-1.0.2.tgz#81068d295a8142ec0ac726c6e2200c30fb6d5e80" integrity sha512-7EwmXrOjyL+ChxMhmG5lnW9MPt1aIeZEwKhQzoBUdTV0N3zuwWDZYVJatDvZ2OyzPUvdIAZDsCetk3coyMfcnQ== -forever-agent@~0.6.1: - version "0.6.1" - resolved "https://registry.yarnpkg.com/forever-agent/-/forever-agent-0.6.1.tgz#fbc71f0c41adeb37f96c577ad1ed42d8fdacca91" - integrity 
sha512-j0KLYPhm6zeac4lz3oJ3o65qvgQCcPubiyotZrXqEaG4hNagNYO8qdlUrX5vwqv9ohqeT/Z3j6+yW067yWWdUw== - -form-data@~2.3.2: - version "2.3.3" - resolved "https://registry.yarnpkg.com/form-data/-/form-data-2.3.3.tgz#dcce52c05f644f298c6a7ab936bd724ceffbf3a6" - integrity sha512-1lLKB2Mu3aGP1Q/2eCOx0fNbRMe7XdwktwOruhfqqd0rIJWwN4Dh+E3hrPSlDCXnSR7UtZ1N38rVXm+6+MEhJQ== - dependencies: - asynckit "^0.4.0" - combined-stream "^1.0.6" - mime-types "^2.1.12" - fs.realpath@^1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/fs.realpath/-/fs.realpath-1.0.0.tgz#1504ad2523158caa40db4a2787cb01411994ea4f" @@ -1430,13 +1305,6 @@ get-stream@^6.0.0: resolved "https://registry.yarnpkg.com/get-stream/-/get-stream-6.0.1.tgz#a262d8eef67aced57c2852ad6167526a43cbf7b7" integrity sha512-ts6Wi+2j3jQjqi70w5AlN8DFnkSwC+MqmxEzdEALB2qXZYV3X/b1CTfgPLGJNMeAWxdPfU8FO1ms3NUfaHCPYg== -getpass@^0.1.1: - version "0.1.7" - resolved "https://registry.yarnpkg.com/getpass/-/getpass-0.1.7.tgz#5eff8e3e684d569ae4cb2b1282604e8ba62149fa" - integrity sha512-0fzj9JxOLfJ+XGLhR8ze3unN0KZCgZwiSSDz168VERjK8Wl8kVSdcu2kspd4s4wtAa1y/qrVRiAA0WclVsu0ng== - dependencies: - assert-plus "^1.0.0" - glob@^7.1.3, glob@^7.1.4: version "7.2.3" resolved "https://registry.yarnpkg.com/glob/-/glob-7.2.3.tgz#b8df0fb802bbfa8e89bd1d938b4e16578ed44f2b" @@ -1479,19 +1347,6 @@ gulp-header@^1.7.1: lodash.template "^4.4.0" through2 "^2.0.0" -har-schema@^2.0.0: - version "2.0.0" - resolved "https://registry.yarnpkg.com/har-schema/-/har-schema-2.0.0.tgz#a94c2224ebcac04782a0d9035521f24735b7ec92" - integrity sha512-Oqluz6zhGX8cyRaTQlFMPw80bSJVG2x/cFb8ZPhUILGgHka9SsokCCOQgpveePerqidZOrT14ipqfJb7ILcW5Q== - -har-validator@~5.1.0: - version "5.1.5" - resolved "https://registry.yarnpkg.com/har-validator/-/har-validator-5.1.5.tgz#1f0803b9f8cb20c0fa13822df1ecddb36bde1efd" - integrity sha512-nmT2T0lljbxdQZfspsno9hgrG3Uir6Ks5afism62poxqBM6sDnMEuPmzTq8XN0OEwqKLLdh1jQI3qyE66Nzb3w== - dependencies: - ajv "^6.12.3" - har-schema "^2.0.0" - has-flag@^3.0.0: 
version "3.0.0" resolved "https://registry.yarnpkg.com/has-flag/-/has-flag-3.0.0.tgz#b5d454dc2199ae225699f3467e5a07f3b955bafd" @@ -1514,15 +1369,6 @@ html-escaper@^2.0.0: resolved "https://registry.yarnpkg.com/html-escaper/-/html-escaper-2.0.2.tgz#dfd60027da36a36dfcbe236262c00a5822681453" integrity sha512-H2iMtd0I4Mt5eYiapRdIDjp+XzelXQ0tFE4JS7YFwFevXXMmOp9myNrUvCg0D6ws8iqkRPBfKHgbwig1SmlLfg== -http-signature@~1.2.0: - version "1.2.0" - resolved "https://registry.yarnpkg.com/http-signature/-/http-signature-1.2.0.tgz#9aecd925114772f3d95b65a60abb8f7c18fbace1" - integrity sha512-CAbnr6Rz4CYQkLYUtSNXxQPUH2gK8f3iWexVlsnMeD+GjlsQ0Xsy1cOX+mN3dtxYomRy21CiOzU8Uhw6OwncEQ== - dependencies: - assert-plus "^1.0.0" - jsprim "^1.2.2" - sshpk "^1.7.0" - human-signals@^2.1.0: version "2.1.0" resolved "https://registry.yarnpkg.com/human-signals/-/human-signals-2.1.0.tgz#dc91fcba42e4d06e4abaed33b3e7a3c02f514ea0" @@ -1627,11 +1473,6 @@ is-stream@^2.0.0: resolved "https://registry.yarnpkg.com/is-stream/-/is-stream-2.0.1.tgz#fac1e3d53b97ad5a9d0ae9cef2389f5810a5c077" integrity sha512-hFoiJiTl63nn+kstHGBtewWSKnQLpyb155KHheA1l39uvtO9nWIop1p3udqPcUd/xbF1VLMO4n7OI6p7RbngDg== -is-typedarray@~1.0.0: - version "1.0.0" - resolved "https://registry.yarnpkg.com/is-typedarray/-/is-typedarray-1.0.0.tgz#e479c80858df0c1b11ddda6940f96011fcda4a9a" - integrity sha512-cyA56iCMHAh5CdzjJIa4aohJyeO1YbwLi3Jc35MmRU6poroFjIGZzUzupGiRPOjgHg9TLu43xbpwXk523fMxKA== - isarray@1.0.0, isarray@~1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/isarray/-/isarray-1.0.0.tgz#bb935d48582cba168c06834957a54a3e07124f11" @@ -1654,11 +1495,6 @@ isobject@^3.0.1: resolved "https://registry.yarnpkg.com/isobject/-/isobject-3.0.1.tgz#4e431e92b11a9731636aa1f9c8d1ccbcfdab78df" integrity sha512-WhB9zCku7EGTj/HQQRz5aUQEUeoQZH2bWcltRErOpymJ4boYE6wL9Tbr23krRPSZ+C5zqNSrSw+Cc7sZZ4b7vg== -isstream@~0.1.2: - version "0.1.2" - resolved 
"https://registry.yarnpkg.com/isstream/-/isstream-0.1.2.tgz#47e63f7af55afa6f92e1500e690eb8b8529c099a" - integrity sha512-Yljz7ffyPbrLpLngrMtZ7NduUgVvi6wG9RJ9IUcyCd59YQ911PBJphODUcbOVbqYfxe1wuYf/LJ8PauMRwsM/g== - istanbul-lib-coverage@^3.0.0, istanbul-lib-coverage@^3.2.0: version "3.2.0" resolved "https://registry.yarnpkg.com/istanbul-lib-coverage/-/istanbul-lib-coverage-3.2.0.tgz#189e7909d0a39fa5a3dfad5b03f71947770191d3" @@ -2075,11 +1911,6 @@ js-yaml@^3.13.1, js-yaml@^3.8.1: argparse "^1.0.7" esprima "^4.0.0" -jsbn@~0.1.0: - version "0.1.1" - resolved "https://registry.yarnpkg.com/jsbn/-/jsbn-0.1.1.tgz#a5e654c2e5a2deb5f201d96cefbca80c0ef2f513" - integrity sha512-UVU9dibq2JcFWxQPA6KCqj5O42VOmAY3zQUfEKxU0KpTGXwNoCjkX1e13eHNvw/xPynt6pU0rZ1htjWTNTSXsg== - jsesc@^2.5.1: version "2.5.2" resolved "https://registry.yarnpkg.com/jsesc/-/jsesc-2.5.2.tgz#80564d2e483dacf6e8ef209650a67df3f0c283a4" @@ -2090,41 +1921,11 @@ json-parse-even-better-errors@^2.3.0: resolved "https://registry.yarnpkg.com/json-parse-even-better-errors/-/json-parse-even-better-errors-2.3.1.tgz#7c47805a94319928e05777405dc12e1f7a4ee02d" integrity sha512-xyFwyhro/JEof6Ghe2iz2NcXoj2sloNsWr/XsERDK/oiPCfaNhl5ONfp+jQdAZRQQ0IJWNzH9zIZF7li91kh2w== -json-schema-traverse@^0.4.1: - version "0.4.1" - resolved "https://registry.yarnpkg.com/json-schema-traverse/-/json-schema-traverse-0.4.1.tgz#69f6a87d9513ab8bb8fe63bdb0979c448e684660" - integrity sha512-xbbCH5dCYU5T8LcEhhuh7HJ88HXuW3qsI3Y0zOZFKfZEHcpWiHU/Jxzk629Brsab/mMiHQti9wMP+845RPe3Vg== - -json-schema@0.4.0: - version "0.4.0" - resolved "https://registry.yarnpkg.com/json-schema/-/json-schema-0.4.0.tgz#f7de4cf6efab838ebaeb3236474cbba5a1930ab5" - integrity sha512-es94M3nTIfsEPisRafak+HDLfHXnKBhV3vU5eqPcS3flIWqcxJWgXHXiey3YrpaNsanY5ei1VoYEbOzijuq9BA== - -json-stringify-safe@~5.0.1: - version "5.0.1" - resolved "https://registry.yarnpkg.com/json-stringify-safe/-/json-stringify-safe-5.0.1.tgz#1296a2d58fd45f19a0f6ce01d65701e2c735b6eb" - integrity 
sha512-ZClg6AaYvamvYEE82d3Iyd3vSSIjQ+odgjaTzRuO3s7toCdFKczob2i0zCh7JE8kWn17yvAWhUVxvqGwUalsRA== - json5@^2.2.1: version "2.2.1" resolved "https://registry.yarnpkg.com/json5/-/json5-2.2.1.tgz#655d50ed1e6f95ad1a3caababd2b0efda10b395c" integrity sha512-1hqLFMSrGHRHxav9q9gNjJ5EXznIxGVO09xQRrwplcS8qs28pZ8s8hupZAmqDwZUmVZ2Qb2jnyPOWcDH8m8dlA== -jsonparse@^1.2.0: - version "1.3.1" - resolved "https://registry.yarnpkg.com/jsonparse/-/jsonparse-1.3.1.tgz#3f4dae4a91fac315f71062f8521cc239f1366280" - integrity sha512-POQXvpdL69+CluYsillJ7SUhKvytYjW9vG/GKpnf+xP8UWgYEM/RaMzHHofbALDiKbbP1W8UEYmgGl39WkPZsg== - -jsprim@^1.2.2: - version "1.4.2" - resolved "https://registry.yarnpkg.com/jsprim/-/jsprim-1.4.2.tgz#712c65533a15c878ba59e9ed5f0e26d5b77c5feb" - integrity sha512-P2bSOMAc/ciLz6DzgjVlGJP9+BrJWu5UDGK70C2iweC5QBIeFf0ZXRvGjEj2uYgrY2MkAAhsSWHDWlFtEroZWw== - dependencies: - assert-plus "1.0.0" - extsprintf "1.3.0" - json-schema "0.4.0" - verror "1.10.0" - kind-of@^3.0.2: version "3.2.2" resolved "https://registry.yarnpkg.com/kind-of/-/kind-of-3.2.2.tgz#31ea21a734bab9bbb0f32466d893aea51e4a3c64" @@ -2206,11 +2007,6 @@ lodash.templatesettings@^4.0.0: dependencies: lodash._reinterpolate "^3.0.0" -lodash@4.17.21: - version "4.17.21" - resolved "https://registry.yarnpkg.com/lodash/-/lodash-4.17.21.tgz#679591c564c3bffaae8454cf0b3df370c3d6911c" - integrity sha512-v2kDEe57lecTulaDIuNTPy3Ry4gLGJ6Z1O3vE1krgXZNrsQ+LFTGHVxVjcXPs17LhbZVGedAJv8XZ1tvj5FvSg== - logform@^2.3.2, logform@^2.4.0: version "2.4.2" resolved "https://registry.yarnpkg.com/logform/-/logform-2.4.2.tgz#a617983ac0334d0c3b942c34945380062795b47c" @@ -2289,18 +2085,6 @@ micromatch@^4.0.4: braces "^3.0.2" picomatch "^2.3.1" -mime-db@1.52.0: - version "1.52.0" - resolved "https://registry.yarnpkg.com/mime-db/-/mime-db-1.52.0.tgz#bbabcdc02859f4987301c856e3387ce5ec43bf70" - integrity sha512-sPU4uV7dYlvtWJxwwxHD0PuihVNiE7TyAbQ5SWxDCB9mUYvOgroQOwYQQOKPJ8CIbE+1ETVlOoK1UC2nU3gYvg== - -mime-types@^2.1.12, mime-types@~2.1.19: - version 
"2.1.35" - resolved "https://registry.yarnpkg.com/mime-types/-/mime-types-2.1.35.tgz#381a871b62a734450660ae3deee44813f70d959a" - integrity sha512-ZDY+bPm5zTTF+YpCrAU9nK0UgICYPT0QtT1NZWFv4s++TNkcgVaT0g6+4R2uI4MjQjzysHB1zxuWL50hzaeXiw== - dependencies: - mime-db "1.52.0" - mimic-fn@^2.1.0: version "2.1.0" resolved "https://registry.yarnpkg.com/mimic-fn/-/mimic-fn-2.1.0.tgz#7ed2c2ccccaf84d3ffcb7a69b57711fc2083401b" @@ -2363,11 +2147,6 @@ npm-run-path@^4.0.1: dependencies: path-key "^3.0.0" -oauth-sign@~0.9.0: - version "0.9.0" - resolved "https://registry.yarnpkg.com/oauth-sign/-/oauth-sign-0.9.0.tgz#47a7b016baa68b5fa0ecf3dee08a85c679ac6455" - integrity sha512-fexhUFFPTGV8ybAtSIGbV6gOkSv8UtRbDBnAyLQw4QPKkgNlsH2ByPGtMUqdWkos6YCRmAqViwgZrJc/mRDzZQ== - object.pick@^1.2.0: version "1.3.0" resolved "https://registry.yarnpkg.com/object.pick/-/object.pick-1.3.0.tgz#87a10ac4c1694bd2e1cbf53591a66141fb5dd747" @@ -2452,11 +2231,6 @@ path-parse@^1.0.7: resolved "https://registry.yarnpkg.com/path-parse/-/path-parse-1.0.7.tgz#fbc114b60ca42b30d9daf5858e4bd68bbedb6735" integrity sha512-LDJzPVEEEPR+y48z93A0Ed0yXb8pAByGWo/k5YYdYgpY2/2EsOsksJrq7lOHxryrVOn1ejG6oAp8ahvOIQD8sw== -performance-now@^2.1.0: - version "2.1.0" - resolved "https://registry.yarnpkg.com/performance-now/-/performance-now-2.1.0.tgz#6309f4e0e5fa913ec1c69307ae364b4b377c9e7b" - integrity sha512-7EAHlyLHI56VEIdK57uwHdHKIaAGbnXPiw0yWbarQZOKaKpvUIgW0jWRVLiatnM+XXlSwsanIBH/hzGMJulMow== - picocolors@^1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/picocolors/-/picocolors-1.0.0.tgz#cb5bdc74ff3f51892236eaf79d68bc44564ab81c" @@ -2501,31 +2275,6 @@ prompts@^2.0.1: kleur "^3.0.3" sisteransi "^1.0.5" -psl@^1.1.24: - version "1.9.0" - resolved "https://registry.yarnpkg.com/psl/-/psl-1.9.0.tgz#d0df2a137f00794565fcaf3b2c00cd09f8d5a5a7" - integrity sha512-E/ZsdU4HLs/68gYzgGTkMicWTLPdAftJLfJFlLUAAKZGkStNU72sZjT66SnMDVOfOWY/YAoiD7Jxa9iHvngcag== - -punycode@^1.4.1: - version "1.4.1" - resolved 
"https://registry.yarnpkg.com/punycode/-/punycode-1.4.1.tgz#c0d5a63b2718800ad8e1eb0fa5269c84dd41845e" - integrity sha512-jmYNElW7yvO7TV33CjSmvSiE2yco3bV2czu/OzDKdMNVZQWfxCblURLhf+47syQRBntjfLdd/H0egrzIG+oaFQ== - -punycode@^2.1.0: - version "2.1.1" - resolved "https://registry.yarnpkg.com/punycode/-/punycode-2.1.1.tgz#b58b010ac40c22c5657616c8d2c2c02c7bf479ec" - integrity sha512-XRsRjdf+j5ml+y/6GKHPZbrF/8p2Yga0JPtdqTIY2Xe5ohJPD9saDJJLPvp9+NSBprVvevdXZybnj2cv8OEd0A== - -qs@~6.5.2: - version "6.5.3" - resolved "https://registry.yarnpkg.com/qs/-/qs-6.5.3.tgz#3aeeffc91967ef6e35c0e488ef46fb296ab76aad" - integrity sha512-qxXIEh4pCGfHICj1mAJQ2/2XVZkjCDTcEgfoSQxc/fYivUZxTkk7L3bDBJSoNrEzXI17oUO5Dp07ktqE5KzczA== - -querystring@0.2.0: - version "0.2.0" - resolved "https://registry.yarnpkg.com/querystring/-/querystring-0.2.0.tgz#b209849203bb25df820da756e747005878521620" - integrity sha512-X/xY82scca2tau62i9mDyU9K+I+djTMUsvwf7xnUX5GLvVzgJybOJf4Y6o9Zx3oJK/LSXg5tTZBjwzqVPaPO2g== - randomatic@^3.0.0: version "3.1.1" resolved "https://registry.yarnpkg.com/randomatic/-/randomatic-3.1.1.tgz#b776efc59375984e36c537b2f51a1f0aff0da1ed" @@ -2580,32 +2329,6 @@ repeat-string@^1.5.2, repeat-string@^1.6.1: resolved "https://registry.yarnpkg.com/repeat-string/-/repeat-string-1.6.1.tgz#8dcae470e1c88abc2d600fff4a776286da75e637" integrity sha512-PV0dzCYDNfRi1jCDbJzpW7jNNDRuCOG/jI5ctQcGKt/clZD+YcPS3yIlWuTJMmESC8aevCFmWJy5wjAFgNqN6w== -request@2.88.0: - version "2.88.0" - resolved "https://registry.yarnpkg.com/request/-/request-2.88.0.tgz#9c2fca4f7d35b592efe57c7f0a55e81052124fef" - integrity sha512-NAqBSrijGLZdM0WZNsInLJpkJokL72XYjUpnB0iwsRgxh7dB6COrHnTBNwN0E+lHDAJzu7kLAkDeY08z2/A0hg== - dependencies: - aws-sign2 "~0.7.0" - aws4 "^1.8.0" - caseless "~0.12.0" - combined-stream "~1.0.6" - extend "~3.0.2" - forever-agent "~0.6.1" - form-data "~2.3.2" - har-validator "~5.1.0" - http-signature "~1.2.0" - is-typedarray "~1.0.0" - isstream "~0.1.2" - json-stringify-safe "~5.0.1" - mime-types "~2.1.19" - 
oauth-sign "~0.9.0" - performance-now "^2.1.0" - qs "~6.5.2" - safe-buffer "^5.1.2" - tough-cookie "~2.4.3" - tunnel-agent "^0.6.0" - uuid "^3.3.2" - require-directory@^2.1.1: version "2.1.1" resolved "https://registry.yarnpkg.com/require-directory/-/require-directory-2.1.1.tgz#8c64ad5fd30dab1c976e2344ffe7f792a6a6df42" @@ -2637,26 +2360,21 @@ resolve@^1.20.0: path-parse "^1.0.7" supports-preserve-symlinks-flag "^1.0.0" -safe-buffer@^5.0.1, safe-buffer@^5.1.2, safe-buffer@~5.2.0: - version "5.2.1" - resolved "https://registry.yarnpkg.com/safe-buffer/-/safe-buffer-5.2.1.tgz#1eaf9fa9bdb1fdd4ec75f58f9cdb4e6b7827eec6" - integrity sha512-rp3So07KcdmmKbGvgaNxQSJr7bGVSVk5S9Eq1F+ppbRo70+YeaDxkw5Dd8NPN+GD6bjnYm2VuPuCXmpuYvmCXQ== - safe-buffer@~5.1.0, safe-buffer@~5.1.1: version "5.1.2" resolved "https://registry.yarnpkg.com/safe-buffer/-/safe-buffer-5.1.2.tgz#991ec69d296e0313747d59bdfd2b745c35f8828d" integrity sha512-Gd2UZBJDkXlY7GbJxfsE8/nvKkUEU1G38c1siN6QP6a9PT9MmHB8GnpscSmMJSoF8LOIrt8ud/wPtojys4G6+g== +safe-buffer@~5.2.0: + version "5.2.1" + resolved "https://registry.yarnpkg.com/safe-buffer/-/safe-buffer-5.2.1.tgz#1eaf9fa9bdb1fdd4ec75f58f9cdb4e6b7827eec6" + integrity sha512-rp3So07KcdmmKbGvgaNxQSJr7bGVSVk5S9Eq1F+ppbRo70+YeaDxkw5Dd8NPN+GD6bjnYm2VuPuCXmpuYvmCXQ== + safe-stable-stringify@^2.3.1: version "2.4.1" resolved "https://registry.yarnpkg.com/safe-stable-stringify/-/safe-stable-stringify-2.4.1.tgz#34694bd8a30575b7f94792aa51527551bd733d61" integrity sha512-dVHE6bMtS/bnL2mwualjc6IxEv1F+OCUpA46pKUj6F8uDbUM0jCCulPqRNPSnWwGNKx5etqMjZYdXtrm5KJZGA== -safer-buffer@^2.0.2, safer-buffer@^2.1.0, safer-buffer@~2.1.0: - version "2.1.2" - resolved "https://registry.yarnpkg.com/safer-buffer/-/safer-buffer-2.1.2.tgz#44fa161b0187b9549dd84bb91802f9bd8385cd6a" - integrity sha512-YZo3K82SD7Riyi0E1EQPojLz7kpepnSQI9IyPbHHg1XXXevb5dJI7tpyN2ADxGcQbHG7vcyRHk0cbwqcQriUtg== - semver@7.x, semver@^7.3.5: version "7.3.8" resolved 
"https://registry.yarnpkg.com/semver/-/semver-7.3.8.tgz#07a78feafb3f7b32347d725e33de7e2a2df67798" @@ -2728,21 +2446,6 @@ sprintf-js@~1.0.2: resolved "https://registry.yarnpkg.com/sprintf-js/-/sprintf-js-1.0.3.tgz#04e6926f662895354f3dd015203633b857297e2c" integrity sha512-D9cPgkvLlV3t3IzL0D0YLvGA9Ahk4PcvVwUbN0dSGr1aP0Nrt4AEnTUbuGvquEC0mA64Gqt1fzirlRs5ibXx8g== -sshpk@^1.7.0: - version "1.17.0" - resolved "https://registry.yarnpkg.com/sshpk/-/sshpk-1.17.0.tgz#578082d92d4fe612b13007496e543fa0fbcbe4c5" - integrity sha512-/9HIEs1ZXGhSPE8X6Ccm7Nam1z8KcoCqPdI7ecm1N33EzAetWahvQWVqLZtaZQ+IDKX4IyA2o0gBzqIMkAagHQ== - dependencies: - asn1 "~0.2.3" - assert-plus "^1.0.0" - bcrypt-pbkdf "^1.0.0" - dashdash "^1.12.0" - ecc-jsbn "~0.1.1" - getpass "^0.1.1" - jsbn "~0.1.0" - safer-buffer "^2.0.2" - tweetnacl "~0.14.0" - stack-trace@0.0.x: version "0.0.10" resolved "https://registry.yarnpkg.com/stack-trace/-/stack-trace-0.0.10.tgz#547c70b347e8d32b4e108ea1a2a159e5fdde19c0" @@ -2755,11 +2458,6 @@ stack-utils@^2.0.3: dependencies: escape-string-regexp "^2.0.0" -stream2asynciter@1.0.3: - version "1.0.3" - resolved "https://registry.yarnpkg.com/stream2asynciter/-/stream2asynciter-1.0.3.tgz#7ba9046846c8b1caf36ec30d64a73514f7f44c5a" - integrity sha512-9/dEZW+LQjuW6ub5hmWi4n9Pn8W8qA8k7NAE1isecesA164e73xTdy1CJ3S9o9YS+O21HuiK7T+4uS7FgKDy4w== - string-length@^4.0.1: version "4.0.2" resolved "https://registry.yarnpkg.com/string-length/-/string-length-4.0.2.tgz#a8a8dc7bd5c1a82b9b3c8b87e125f66871b6e57a" @@ -2866,11 +2564,6 @@ through2@^2.0.0: readable-stream "~2.3.6" xtend "~4.0.1" -through@2.3.8, "through@>=2.2.7 <3": - version "2.3.8" - resolved "https://registry.yarnpkg.com/through/-/through-2.3.8.tgz#0dd4c9ffaabc357960b1b724115d7e0e86a2e1f5" - integrity sha512-w89qg7PI8wAdvX60bMDP+bFoD5Dvhm9oLheFp5O4a2QF0cSBGsBX4qZmadPMvVqlLJBBci+WqGGOAPvcDeNSVg== - tmpl@1.0.5: version "1.0.5" resolved "https://registry.yarnpkg.com/tmpl/-/tmpl-1.0.5.tgz#8683e0b902bb9c20c4f726e3c0b69f36518c07cc" @@ -2900,14 
+2593,6 @@ toml@^2.3.2: resolved "https://registry.yarnpkg.com/toml/-/toml-2.3.6.tgz#25b0866483a9722474895559088b436fd11f861b" integrity sha512-gVweAectJU3ebq//Ferr2JUY4WKSDe5N+z0FvjDncLGyHmIDoxgY/2Ie4qfEIDm4IS7OA6Rmdm7pdEEdMcV/xQ== -tough-cookie@~2.4.3: - version "2.4.3" - resolved "https://registry.yarnpkg.com/tough-cookie/-/tough-cookie-2.4.3.tgz#53f36da3f47783b0925afa06ff9f3b165280f781" - integrity sha512-Q5srk/4vDM54WJsJio3XNn6K2sCG+CQ8G5Wz6bZhRZoAe/+TxjWB/GlFAnYEbkYVlON9FMk/fE3h2RLpPXo4lQ== - dependencies: - psl "^1.1.24" - punycode "^1.4.1" - triple-beam@^1.3.0: version "1.3.0" resolved "https://registry.yarnpkg.com/triple-beam/-/triple-beam-1.3.0.tgz#a595214c7298db8339eeeee083e4d10bd8cb8dd9" @@ -2946,23 +2631,6 @@ ts-node@^10.9.1: v8-compile-cache-lib "^3.0.1" yn "3.1.1" -tsv@0.2.0: - version "0.2.0" - resolved "https://registry.yarnpkg.com/tsv/-/tsv-0.2.0.tgz#92869a3cb5f50332f3dc90fca82be667db6f72d6" - integrity sha512-GG6xbOP85giXXom0dS6z9uyDsxktznjpa1AuDlPrIXDqDnbhjr9Vk6Us8iz6U1nENL4CPS2jZDvIjEdaZsmc4Q== - -tunnel-agent@^0.6.0: - version "0.6.0" - resolved "https://registry.yarnpkg.com/tunnel-agent/-/tunnel-agent-0.6.0.tgz#27a5dea06b36b04a0a9966774b290868f0fc40fd" - integrity sha512-McnNiV1l8RYeY8tBgEpuodCC1mLUdbSN+CYBL7kJsJNInOP8UjDDEwdk6Mw60vdLLrr5NHKZhMAOSrR2NZuQ+w== - dependencies: - safe-buffer "^5.0.1" - -tweetnacl@^0.14.3, tweetnacl@~0.14.0: - version "0.14.5" - resolved "https://registry.yarnpkg.com/tweetnacl/-/tweetnacl-0.14.5.tgz#5ae68177f192d4456269d108afa93ff8743f4f64" - integrity sha512-KXXFFdAbFXY4geFIwoyNK+f5Z1b7swfXABfL7HXCmoIWMKU3dmS26672A4EeQtDzLKy7SXmfBu51JolvEKwtGA== - type-detect@4.0.8: version "4.0.8" resolved "https://registry.yarnpkg.com/type-detect/-/type-detect-4.0.8.tgz#7646fb5f18871cfbb7749e69bd39a6388eb7450c" @@ -2991,23 +2659,11 @@ update-browserslist-db@^1.0.9: escalade "^3.1.1" picocolors "^1.0.0" -uri-js@^4.2.2: - version "4.4.1" - resolved 
"https://registry.yarnpkg.com/uri-js/-/uri-js-4.4.1.tgz#9b1a52595225859e55f669d928f88c6c57f2a77e" - integrity sha512-7rKUyy33Q1yc98pQ1DAmLtwX109F7TIfWlW1Ydo8Wl1ii1SeHieeh0HHfPeL2fMXK6z0s8ecKs9frCuLJvndBg== - dependencies: - punycode "^2.1.0" - util-deprecate@^1.0.1, util-deprecate@~1.0.1: version "1.0.2" resolved "https://registry.yarnpkg.com/util-deprecate/-/util-deprecate-1.0.2.tgz#450d4dc9fa70de732762fbd2d4a28981419a0ccf" integrity sha512-EPD5q1uXyFxJpCrLnCc1nHnq3gOa6DZBocAIiI2TaSCA7VCJ1UJDMagCzIkXNsUYfD1daK//LTEQ8xiIbrHtcw== -uuid@3.4.0, uuid@^3.3.2: - version "3.4.0" - resolved "https://registry.yarnpkg.com/uuid/-/uuid-3.4.0.tgz#b23e4358afa8a202fe7a100af1f5f883f02007ee" - integrity sha512-HjSDRw6gZE5JMggctHBcjVak08+KEVhSIiDzFnT9S9aegmp85S/bReBVTb4QTFaRNptJ9kuYaNhnbNEOkbKb/A== - v8-compile-cache-lib@^3.0.1: version "3.0.1" resolved "https://registry.yarnpkg.com/v8-compile-cache-lib/-/v8-compile-cache-lib-3.0.1.tgz#6336e8d71965cb3d35a1bbb7868445a7c05264bf" @@ -3022,15 +2678,6 @@ v8-to-istanbul@^9.0.1: "@types/istanbul-lib-coverage" "^2.0.1" convert-source-map "^1.6.0" -verror@1.10.0: - version "1.10.0" - resolved "https://registry.yarnpkg.com/verror/-/verror-1.10.0.tgz#3a105ca17053af55d6e270c1f8288682e18da400" - integrity sha512-ZZKSmDAEFOijERBLkmYfJ+vmk3w+7hOLYDNkRCuRuMJGEmqYNCNLyBBFwWKVMhfwaEF3WOd0Zlw86U/WC/+nYw== - dependencies: - assert-plus "^1.0.0" - core-util-is "1.0.2" - extsprintf "^1.2.0" - walker@^1.0.8: version "1.0.8" resolved "https://registry.yarnpkg.com/walker/-/walker-1.0.8.tgz#bd498db477afe573dc04185f011d3ab8a8d7653f"