Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
100 changes: 100 additions & 0 deletions .github/workflows/integration-tests.yml
Original file line number Diff line number Diff line change
@@ -0,0 +1,100 @@
# CI workflow: discovers the dt-tests integration suites, then runs each suite
# as an independent matrix job against Docker-provisioned services.
name: Integration Tests

on:
  pull_request:
    branches: [main]
    # Skip CI entirely for documentation/image-only changes.
    paths-ignore:
      - "**.md"
      - "docs/**"
      - "LICENSE*"
      - "**/*.png"
      - "**/*.jpg"
      - "**/*.svg"
  # Allow manual runs from the Actions tab.
  workflow_dispatch:

# Cancel superseded in-flight runs for the same branch/ref.
concurrency:
  group: ${{ github.workflow }}-${{ github.ref }}
  cancel-in-progress: true

env:
  CARGO_TERM_COLOR: always
  RUST_BACKTRACE: 1

jobs:
  # Emits the JSON array of suite names that feeds the matrix below.
  discover:
    name: Discover integration suites
    runs-on: ubuntu-latest
    outputs:
      suites: ${{ steps.suites.outputs.suites }}
    steps:
      - name: Checkout code
        uses: actions/checkout@v4
        with:
          submodules: true

      # The script is the single source of truth for which suites exist;
      # it must print a one-line JSON array for $GITHUB_OUTPUT to stay valid.
      - name: Get integration suites from script
        id: suites
        working-directory: dt-tests
        run: |
          suites_json="$(./scripts/run-integration-tests.sh --list-suites-json)"
          echo "suites=${suites_json}" >> "$GITHUB_OUTPUT"
          echo "Integration suites JSON: ${suites_json}"

      - name: Print integration suites
        run: echo '${{ steps.suites.outputs.suites }}'

  # One job per discovered suite; suites fail independently (fail-fast: false).
  integration-test:
    name: "Integration / ${{ matrix.suite }}"
    needs: discover
    runs-on: ubuntu-latest
    timeout-minutes: 90

    strategy:
      fail-fast: false
      matrix:
        suite: ${{ fromJSON(needs.discover.outputs.suites) }}

    steps:
      - name: Checkout code
        uses: actions/checkout@v4
        with:
          submodules: true

      # Bring up the suite's docker services before compiling so they warm up
      # in parallel with the Rust build; --keep-docker leaves them running.
      - name: Start required services
        working-directory: dt-tests
        run: ./scripts/run-integration-tests.sh --suite "${{ matrix.suite }}" --up --wait --keep-docker

      - name: Setup Rust toolchain
        uses: dtolnay/rust-toolchain@stable

      - name: Setup Rust cache
        uses: Swatinem/rust-cache@v2
        with:
          key: integration-${{ runner.os }}-${{ hashFiles('**/Cargo.lock') }}

      - name: Install cargo-nextest
        uses: taiki-e/install-action@v2
        with:
          tool: cargo-nextest

      - name: Run integration tests
        working-directory: dt-tests
        run: ./scripts/run-integration-tests.sh --suite "${{ matrix.suite }}" --test --runner nextest --keep-docker

      # Only on failure: dump container logs into the job output for triage.
      - name: Dump container logs on failure
        if: failure()
        working-directory: dt-tests
        run: ./scripts/run-integration-tests.sh --suite "${{ matrix.suite }}" --logs --keep-docker

      # NOTE(review): this path is relative to the workspace root, while the
      # script runs in dt-tests — confirm the script writes logs to
      # <root>/tmp/integration-logs and not dt-tests/tmp/integration-logs.
      - name: Upload integration logs
        if: always()
        uses: actions/upload-artifact@v4
        with:
          name: integration-logs-${{ matrix.suite }}
          path: tmp/integration-logs
          if-no-files-found: ignore

      # Always tear services down so reruns on the same runner start clean.
      - name: Stop services
        if: always()
        working-directory: dt-tests
        run: ./scripts/run-integration-tests.sh --suite "${{ matrix.suite }}" --down
7 changes: 5 additions & 2 deletions dt-common/src/meta/adaptor/mysql_col_value_convertor.rs
Original file line number Diff line number Diff line change
@@ -1,6 +1,9 @@
use std::io::Cursor;

use crate::{config::config_enums::DbType, error::Error, meta::time::dt_utc_time::DtNaiveTime};
use crate::{
config::config_enums::DbType, error::Error, meta::time::dt_utc_time::DtNaiveTime,
utils::sql_util::SqlUtil,
};
use anyhow::bail;
use byteorder::{LittleEndian, ReadBytesExt};
use chrono::{TimeZone, Utc};
Expand Down Expand Up @@ -461,7 +464,7 @@ impl MysqlColValueConvertor {
| MysqlColType::MediumText { .. }
| MysqlColType::Text { .. }
| MysqlColType::LongText { .. } => {
let value: String = row.try_get(col)?;
let value = SqlUtil::try_get_mysql_string(row, col)?;
Ok(ColValue::String(value))
}

Expand Down
24 changes: 20 additions & 4 deletions dt-common/src/meta/avro/avro_converter.rs
Original file line number Diff line number Diff line change
@@ -1,6 +1,5 @@
use std::{collections::HashMap, str::FromStr};

use anyhow::Ok;
use apache_avro::{from_avro_datum, to_avro_datum, types::Value, Schema};

use crate::{
Expand Down Expand Up @@ -309,9 +308,11 @@ impl AvroConverter {

ColValue::Float(v) => Value::Double(*v as f64),
ColValue::Double(v) => Value::Double(*v),
ColValue::Blob(v) | ColValue::Json(v) | ColValue::RawString(v) => {
Value::Bytes(v.clone())
}
ColValue::Blob(v) | ColValue::Json(v) => Value::Bytes(v.clone()),
ColValue::RawString(v) => ColValue::RawString(v.clone())
.to_utf8_string()
.map(Value::String)
.unwrap_or_else(|| Value::Bytes(v.clone())),

ColValue::Decimal(v)
| ColValue::Time(v)
Expand Down Expand Up @@ -435,6 +436,21 @@ mod tests {
validate_ddl_data(&mut avro_converter, &ddl_data).await;
}

/// A `RawString` holding valid UTF-8 should round-trip through Avro as a
/// `String`; a `RawString` holding non-UTF-8 bytes should round-trip as a
/// `Blob` (byte-exact), never a lossy string.
#[test]
fn test_avro_raw_string_round_trip() {
    // Valid UTF-8 payload -> Avro string -> ColValue::String.
    let utf8_raw = ColValue::RawString(b"mn".to_vec());
    assert_eq!(
        ColValue::String("mn".to_string()),
        AvroConverter::avro_to_col_value(AvroConverter::col_value_to_avro(&utf8_raw))
    );

    // 0xff 0xfe is not valid UTF-8 -> Avro bytes -> ColValue::Blob.
    let binary_raw = ColValue::RawString(vec![0xff, 0xfe]);
    assert_eq!(
        ColValue::Blob(vec![0xff, 0xfe]),
        AvroConverter::avro_to_col_value(AvroConverter::col_value_to_avro(&binary_raw))
    );
}

async fn validate_row_data(avro_converter: &mut AvroConverter, row_data: &RowData) {
let payload = avro_converter
.row_data_to_avro_value(row_data)
Expand Down
38 changes: 38 additions & 0 deletions dt-common/src/meta/col_value.rs
Original file line number Diff line number Diff line change
Expand Up @@ -261,6 +261,23 @@ impl ColValue {
}
}

/// Returns the value as a UTF-8 `String` when possible.
///
/// - `RawString`: `Some` only if the bytes are valid UTF-8, else `None`.
/// - `String`: always `Some` (cloned).
/// - Any other variant: `None`.
pub fn to_utf8_string(&self) -> Option<String> {
    match self {
        // Validate on a borrow first so invalid bytes cost no allocation
        // (String::from_utf8(v.clone()) would clone even on failure).
        ColValue::RawString(v) => std::str::from_utf8(v).ok().map(str::to_owned),
        ColValue::String(v) => Some(v.clone()),
        _ => None,
    }
}

/// Returns a displayable `String` for the value.
///
/// For `RawString`, valid UTF-8 bytes are decoded as text; invalid bytes
/// fall back to a lowercase hex encoding. Every other variant defers to
/// `to_option_string`.
pub fn to_utf8_or_hex_string(&self) -> Option<String> {
    if let ColValue::RawString(bytes) = self {
        let rendered = String::from_utf8(bytes.clone()).unwrap_or_else(|_| hex::encode(bytes));
        Some(rendered)
    } else {
        self.to_option_string()
    }
}

pub fn is_unchanged_toast(&self) -> bool {
matches!(self, ColValue::UnchangedToast)
}
Expand Down Expand Up @@ -482,4 +499,25 @@ mod tests {
fn test_tagged_col_value_def_is_exposed_from_meta() {
let _ = std::any::type_name::<MetaTaggedColValueDef>();
}

/// Pins the three string views of a `RawString`:
/// `to_option_string` hex-encodes unconditionally, `to_utf8_string` decodes
/// only valid UTF-8, and `to_utf8_or_hex_string` prefers UTF-8 with a hex
/// fallback for invalid bytes.
#[test]
fn test_raw_string_string_helpers() {
    // b"ij" is 0x69 0x6a, hence the hex form "696a".
    assert_eq!(
        ColValue::RawString(b"ij".to_vec()).to_option_string(),
        Some("696a".to_string())
    );
    assert_eq!(
        ColValue::RawString(b"ij".to_vec()).to_utf8_string(),
        Some("ij".to_string())
    );
    assert_eq!(
        ColValue::RawString(b"ij".to_vec()).to_utf8_or_hex_string(),
        Some("ij".to_string())
    );
    // 0xff 0xfe is not valid UTF-8: strict decode fails, hex fallback wins.
    assert_eq!(ColValue::RawString(vec![0xff, 0xfe]).to_utf8_string(), None);
    assert_eq!(
        ColValue::RawString(vec![0xff, 0xfe]).to_utf8_or_hex_string(),
        Some("fffe".to_string())
    );
}
}
6 changes: 4 additions & 2 deletions dt-common/src/meta/mysql/mysql_meta_fetcher.rs
Original file line number Diff line number Diff line change
Expand Up @@ -160,7 +160,8 @@ impl MysqlMetaFetcher {
col_origin_type_map.insert(col.clone(), origin_type);
col_type_map.insert(col.clone(), col_type);

let is_nullable = row.try_get::<String, _>(IS_NULLABLE)?.to_lowercase() == "yes";
let is_nullable =
SqlUtil::try_get_mysql_string(&row, IS_NULLABLE)?.to_lowercase() == "yes";
if is_nullable {
nullable_cols.insert(col);
}
Expand Down Expand Up @@ -327,6 +328,7 @@ impl MysqlMetaFetcher {
tb: &str,
) -> anyhow::Result<HashMap<String, Vec<String>>> {
let mut key_map: HashMap<String, Vec<String>> = HashMap::new();
// let mut prefixed_keys = HashSet::new();
let sql = format!("SHOW INDEXES FROM `{}`.`{}`", schema, tb);
let mut rows = sqlx::raw_sql(&sql).fetch(conn_pool);
while let Some(row) = rows.try_next().await? {
Expand Down Expand Up @@ -430,7 +432,7 @@ impl MysqlMetaFetcher {
let sql = "SELECT VERSION()";
let mut rows = sqlx::raw_sql(sql).fetch(&self.conn_pool);
if let Some(row) = rows.try_next().await? {
let version: String = row.get_unchecked(0);
let version = SqlUtil::try_get_mysql_string(&row, 0)?;
self.version = version.trim().into();
return Ok(());
}
Expand Down
30 changes: 29 additions & 1 deletion dt-common/src/meta/row_data.rs
Original file line number Diff line number Diff line change
Expand Up @@ -215,7 +215,7 @@ impl RowData {
let mut str_col_values: HashMap<String, ColValue> = HashMap::new();
for (col, col_value) in col_values.iter() {
if let ColValue::RawString(_) = col_value {
if let Some(str) = col_value.to_option_string() {
if let Some(str) = col_value.to_utf8_or_hex_string() {
str_col_values.insert(col.into(), ColValue::String(str));
} else {
str_col_values.insert(col.to_owned(), ColValue::None);
Expand Down Expand Up @@ -295,3 +295,31 @@ impl RowData {
size
}
}

#[cfg(test)]
mod tests {
use std::collections::HashMap;

use super::*;

/// After `convert_raw_string`, a `RawString` column holding valid UTF-8 must
/// become a `ColValue::String` with the decoded text (not a hex encoding).
#[test]
fn test_convert_raw_string_prefers_utf8() {
    let mut row_data = RowData::new(
        "db".to_string(),
        "tb".to_string(),
        RowType::Insert,
        None, // no before-image for an insert
        Some(HashMap::from([(
            "c1".to_string(),
            ColValue::RawString(b"ij".to_vec()),
        )])),
    );

    row_data.convert_raw_string();

    assert_eq!(
        row_data.require_after().unwrap().get("c1"),
        Some(&ColValue::String("ij".to_string()))
    );
}
}
23 changes: 23 additions & 0 deletions dt-connector/src/extractor/pg/pg_cdc_client.rs
Original file line number Diff line number Diff line change
Expand Up @@ -138,6 +138,10 @@ impl PgCdcClient {
match key.as_ref() {
"sslmode" => ssl_mode = Some(Self::parse_url_ssl_mode(value.as_ref())?),
"sslrootcert" => ssl_ca_path = Some(value.into_owned()),
// Replication connections are parsed by tokio-postgres directly and
// do not understand app-layer wrapped options like
// `options[statement_timeout]=10s`.
k if Self::should_strip_replication_query_param(k) => {}
_ => other_pairs.push((key.into_owned(), value.into_owned())),
}
}
Expand All @@ -152,6 +156,10 @@ impl PgCdcClient {
Ok((parsed.to_string(), ssl_config))
}

/// Returns true for URL query parameters that must be dropped before the URL
/// is handed to tokio-postgres for a replication connection: the bare
/// `options` key and any bracketed form such as `options[statement_timeout]`.
fn should_strip_replication_query_param(key: &str) -> bool {
    key == "options" || key.starts_with("options[")
}

fn parse_url_ssl_mode(value: &str) -> anyhow::Result<SslMode> {
match value {
"disable" => Ok(SslMode::Disable),
Expand Down Expand Up @@ -377,4 +385,19 @@ mod tests {

assert_eq!(ssl_config.ssl_mode, SslMode::Disable);
}

/// A wrapped `options[...]` query parameter must be stripped from the URL so
/// tokio-postgres can still parse user/password/dbname for the replication
/// connection, and SSL detection is unaffected.
#[test]
fn build_replication_config_strips_wrapped_options_params() {
    let client = build_client(
        "postgres://url_user:url_pass@localhost:5432/test_db?options[statement_timeout]=10s",
        ConnectionAuthConfig::NoAuth,
    );

    let (config, ssl_config) = client.build_replication_config().unwrap();

    // No sslmode in the URL -> defaults to Disable.
    assert_eq!(ssl_config.ssl_mode, SslMode::Disable);
    // Credentials and dbname survive the stripping.
    assert_eq!(config.get_user(), Some("url_user"));
    assert_eq!(config.get_password(), Some("url_pass".as_bytes()));
    assert_eq!(config.get_dbname(), Some("test_db"));
}
}
29 changes: 25 additions & 4 deletions dt-connector/src/rdb_query_builder.rs
Original file line number Diff line number Diff line change
Expand Up @@ -753,16 +753,37 @@ impl RdbQueryBuilder<'_> {
| ColValue::Long(_)
| ColValue::UnsignedLong(_)
| ColValue::LongLong(_)
| ColValue::UnsignedLongLong(_)
| ColValue::Float(_)
| ColValue::Double(_)
| ColValue::Decimal(_) => col_value
| ColValue::UnsignedLongLong(_) => col_value
.to_option_string()
.unwrap_or_else(|| "NULL".to_string()),
ColValue::Decimal(v) => Self::format_pg_decimal_literal(v),
ColValue::Float(v) => Self::format_pg_float_literal((*v).into()),
ColValue::Double(v) => Self::format_pg_float_literal(*v),
_ => Self::quote_pg_string_literal(col_value),
}
}

/// Renders an f64 as a PostgreSQL literal. The special values NaN and
/// +/-Infinity are not bare numeric tokens in SQL, so they must be emitted as
/// quoted strings ('NaN', 'Infinity', '-Infinity'); finite values are emitted
/// as plain numeric text.
fn format_pg_float_literal(value: f64) -> String {
    match value {
        v if v.is_nan() => "'NaN'".to_string(),
        v if v == f64::INFINITY => "'Infinity'".to_string(),
        v if v == f64::NEG_INFINITY => "'-Infinity'".to_string(),
        v => v.to_string(),
    }
}

/// Renders a decimal's string form as a PostgreSQL literal. The numeric
/// special values ("NaN", "Infinity", "-Infinity") must be quoted; any other
/// value passes through unchanged.
fn format_pg_decimal_literal(value: &str) -> String {
    if matches!(value, "NaN" | "Infinity" | "-Infinity") {
        format!("'{}'", value)
    } else {
        value.to_string()
    }
}

fn quote_pg_string_literal(col_value: &ColValue) -> String {
if let Some(string) = col_value.to_option_string() {
format!(r#"'{}'"#, string.replace('\'', "''"))
Expand Down
Loading
Loading