diff --git a/.gitignore b/.gitignore index 3f072cfb..db7d71b1 100644 --- a/.gitignore +++ b/.gitignore @@ -30,3 +30,6 @@ run.out clickhouse/etc_sudoers.bak workdir/ timeout-exit-codes.out +target/ +.idea/ +Cargo.lock diff --git a/polars/groupby/Cargo.toml b/polars/groupby/Cargo.toml new file mode 100644 index 00000000..0197f7aa --- /dev/null +++ b/polars/groupby/Cargo.toml @@ -0,0 +1,15 @@ +[package] +name = "polars-groupby" +version = "0.1.0" +authors = ["ritchie46 "] +edition = "2018" + +# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html + +[dependencies] +polars = {path= "/home/ritchie46/code/polars/polars", features = ["simd"]} +snmalloc-rs = {version = "0.2", features= ["cache-friendly"]} + +[profile.release] +lto = true +codegen-units = 1 diff --git a/polars/groupby/exec.sh b/polars/groupby/exec.sh new file mode 100755 index 00000000..00a2a0cf --- /dev/null +++ b/polars/groupby/exec.sh @@ -0,0 +1,4 @@ +#!/bin/bash +set -e + +RUSTFLAGS='-C target-cpu=native' cargo run +nightly --release diff --git a/polars/groupby/src/main.rs b/polars/groupby/src/main.rs new file mode 100644 index 00000000..21cd5b1a --- /dev/null +++ b/polars/groupby/src/main.rs @@ -0,0 +1,514 @@ +use polars::lazy::functions::pearson_corr; +use polars::prelude::*; +use std::time::Instant; + +#[global_allocator] +static ALLOC: snmalloc_rs::SnMalloc = snmalloc_rs::SnMalloc; + +macro_rules! 
run_query { + ($name: expr, $query: block) => {{ + let t = Instant::now(); + let (ans, chk) = $query; + println!("{} took {} ms", $name, t.elapsed().as_millis()); + println!("ans = {:?}\n chk = {:?}", ans, chk); + (ans, chk) + }}; +} + +fn main() -> Result<()> { + let path = format!("../data/{}.csv", std::env::var("SRC_DATANAME").unwrap()); + + let overwrite_schema = Schema::new(vec![ + Field::new("id4", DataType::Int32), + Field::new("id5", DataType::Int32), + Field::new("id6", DataType::Int32), + Field::new("v1", DataType::Int32), + Field::new("v2", DataType::Int32), + Field::new("v3", DataType::Float64), + ]); + let mut df = CsvReader::from_path(&path)? + .with_dtype_overwrite(Some(&overwrite_schema)) + .finish()?; + df.may_apply("id1", |s| s.cast::())?; + df.may_apply("id2", |s| s.cast::())?; + df.may_apply("id3", |s| s.cast::())?; + + let q1 = || { + df.clone() + .lazy() + .groupby(vec![col("id1")]) + .agg(vec![col("v1").sum().alias("v1")]) + .collect() + }; + let _ = run_query!("q1", { + let ans = q1()?; + let chk = (ans.column("v1")?.sum::().unwrap(),); + (ans, chk) + }); + let _ = run_query!("q1", { + let ans = q1()?; + let chk = (ans.column("v1")?.sum::().unwrap(),); + (ans, chk) + }); + + let q2 = || { + df.clone() + .lazy() + .groupby(vec![col("id1"), col("id2")]) + .agg(vec![col("v1").sum().alias("v1")]) + .collect() + }; + + let _ = run_query!("q2", { + let ans = q2()?; + let chk = (ans.column("v1")?.sum::().unwrap(),); + (ans, chk) + }); + let _ = run_query!("q2", { + let ans = q2()?; + let chk = (ans.column("v1")?.sum::().unwrap(),); + (ans, chk) + }); + + let q3 = || { + df.clone() + .lazy() + .groupby(vec![col("id3")]) + .agg(vec![ + col("v1").sum().alias("v1_sum"), + col("v3").mean().alias("v3_mean"), + ]) + .collect() + }; + + let _ = run_query!("q3", { + let ans = q3()?; + let checks = ans + .clone() + .lazy() + .select(vec![col("v1_sum").sum(), col("v3_mean").sum()]) + .collect()?; + let sum_v1 = checks + .select_at_idx(0) + .unwrap() + 
.cast::()? + .f32()? + .get(0) + .unwrap(); + let sum_mean_v3 = checks + .select_at_idx(1) + .unwrap() + .cast::()? + .f32()? + .get(0) + .unwrap(); + let chk = (sum_v1, sum_mean_v3); + (ans, chk) + }); + + let _ = run_query!("q3", { + let ans = q3()?; + let checks = ans + .clone() + .lazy() + .select(vec![col("v1_sum").sum(), col("v3_mean").sum()]) + .collect()?; + let sum_v1 = checks + .select_at_idx(0) + .unwrap() + .cast::()? + .f32()? + .get(0) + .unwrap(); + let sum_mean_v3 = checks + .select_at_idx(1) + .unwrap() + .cast::()? + .f32()? + .get(0) + .unwrap(); + let chk = (sum_v1, sum_mean_v3); + (ans, chk) + }); + + let q4 = || { + df.clone() + .lazy() + .groupby(vec![col("id4")]) + .agg(vec![ + col("v1").mean().alias("v1_mean"), + col("v2").mean().alias("v2_mean"), + col("v3").mean().alias("v3_mean"), + ]) + .collect() + }; + + let _ = run_query!("q4", { + let ans = q4()?; + let checks = ans + .clone() + .lazy() + .select(vec![ + col("v1_mean").sum(), + col("v2_mean").sum(), + col("v3_mean").sum(), + ]) + .collect()?; + let sum_mean_v1 = checks + .select_at_idx(0) + .unwrap() + .cast::()? + .f32()? + .get(0) + .unwrap(); + let sum_mean_v2 = checks + .select_at_idx(0) + .unwrap() + .cast::()? + .f32()? + .get(0) + .unwrap(); + let sum_mean_v3 = checks + .select_at_idx(1) + .unwrap() + .cast::()? + .f32()? + .get(0) + .unwrap(); + let chk = (sum_mean_v1, sum_mean_v2, sum_mean_v3); + (ans, chk) + }); + + let _ = run_query!("q4", { + let ans = q4()?; + let checks = ans + .clone() + .lazy() + .select(vec![ + col("v1_mean").sum(), + col("v2_mean").sum(), + col("v3_mean").sum(), + ]) + .collect()?; + let sum_mean_v1 = checks + .select_at_idx(0) + .unwrap() + .cast::()? + .f32()? + .get(0) + .unwrap(); + + let sum_mean_v2 = checks + .select_at_idx(1) + .unwrap() + .cast::()? + .f32()? + .get(0) + .unwrap(); + let sum_mean_v3 = checks + .select_at_idx(2) + .unwrap() + .cast::()? + .f32()? 
+ .get(0) + .unwrap(); + let chk = (sum_mean_v1, sum_mean_v2, sum_mean_v3); + (ans, chk) + }); + + let q5 = || { + df.clone() + .lazy() + .groupby(vec![col("id6")]) + .agg(vec![ + col("v1").sum().alias("v1_sum"), + col("v2").sum().alias("v2_sum"), + col("v3").sum().alias("v3_sum"), + ]) + .collect() + }; + + let _ = run_query!("q5", { + let ans = q5()?; + let checks = ans + .clone() + .lazy() + .select(vec![ + col("v1_sum").sum(), + col("v2_sum").sum(), + col("v3_sum").sum(), + ]) + .collect()?; + let sum_sum_v1 = checks + .select_at_idx(0) + .unwrap() + .cast::()? + .f32()? + .get(0) + .unwrap(); + let sum_sum_v2 = checks + .select_at_idx(1) + .unwrap() + .cast::()? + .f32()? + .get(0) + .unwrap(); + let sum_sum_v3 = checks + .select_at_idx(2) + .unwrap() + .cast::()? + .f32()? + .get(0) + .unwrap(); + let chk = (sum_sum_v1, sum_sum_v2, sum_sum_v3); + (ans, chk) + }); + + let _ = run_query!("q5", { + let ans = q5()?; + let checks = ans + .clone() + .lazy() + .select(vec![ + col("v1_sum").sum(), + col("v2_sum").sum(), + col("v3_sum").sum(), + ]) + .collect()?; + let sum_sum_v1 = checks + .select_at_idx(0) + .unwrap() + .cast::()? + .f32()? + .get(0) + .unwrap(); + let sum_sum_v2 = checks + .select_at_idx(1) + .unwrap() + .cast::()? + .f32()? + .get(0) + .unwrap(); + let sum_sum_v3 = checks + .select_at_idx(2) + .unwrap() + .cast::()? + .f32()? + .get(0) + .unwrap(); + let chk = (sum_sum_v1, sum_sum_v2, sum_sum_v3); + (ans, chk) + }); + + let q6 = || { + df.clone() + .lazy() + .groupby(vec![col("id4"), col("id5")]) + .agg(vec![ + col("v3").median().alias("v3_median"), + col("v3").std().alias("v3_std"), + ]) + .collect() + }; + + let _ = run_query!("q6", { + let ans = q6()?; + let checks = ans + .clone() + .lazy() + .select(vec![col("v3_median").sum(), col("v3_std").sum()]) + .collect()?; + let sum_v3_median = checks + .select_at_idx(0) + .unwrap() + .cast::()? + .f32()? + .get(0) + .unwrap(); + let sum_v3_std = checks + .select_at_idx(0) + .unwrap() + .cast::()? 
+ .f32()? + .get(0) + .unwrap(); + let chk = (sum_v3_median, sum_v3_std); + (ans, chk) + }); + + let _ = run_query!("q6", { + let ans = q6()?; + let checks = ans + .clone() + .lazy() + .select(vec![col("v3_median").sum(), col("v3_std").sum()]) + .collect()?; + let sum_v3_median = checks + .select_at_idx(0) + .unwrap() + .cast::()? + .f32()? + .get(0) + .unwrap(); + let sum_v3_std = checks + .select_at_idx(0) + .unwrap() + .cast::()? + .f32()? + .get(0) + .unwrap(); + let chk = (sum_v3_median, sum_v3_std); + (ans, chk) + }); + + let q7 = || { + df.clone() + .lazy() + .groupby(vec![col("id3")]) + .agg(vec![ + col("v1").max().alias("v1"), + col("v2").min().alias("v2"), + ]) + .select(vec![ + col("id3"), + (col("v1") - col("v2")).alias("range_v1_v2"), + ]) + .collect() + }; + + let _ = run_query!("q7", { + let ans = q7()?; + + let chk = (ans.column("range_v1_v2")?.sum::().unwrap(),); + (ans, chk) + }); + + let _ = run_query!("q7", { + let ans = q7()?; + + let chk = (ans.column("range_v1_v2")?.sum::().unwrap(),); + (ans, chk) + }); + + let q8 = || { + df.clone() + .lazy() + .drop_nulls(Some(vec![col("v3")])) + .sort("v3", true) + .groupby(vec![col("id6")]) + .agg(vec![col("v3").head(Some(2)).alias("v3_top_2")]) + .explode(&[col("v3_top_2")]) + .collect() + }; + + let _ = run_query!("q8", { + let ans = q8()?; + + let chk = (ans.column("v3_top_2")?.sum::().unwrap(),); + (ans, chk) + }); + + let _ = run_query!("q8", { + let ans = q8()?; + + let chk = (ans.column("v3_top_2")?.sum::().unwrap(),); + (ans, chk) + }); + + let q9 = || { + df.clone() + .lazy() + .drop_nulls(Some(vec![col("v1"), col("v2")])) + .groupby(vec![col("id2"), col("id4")]) + .agg(vec![pearson_corr(col("v1"), col("v2")) + .pow(2.0) + .alias("r2")]) + .collect() + }; + let _ = run_query!("q9", { + let ans = q9()?; + + let chk = (ans.column("r2")?.sum::().unwrap(),); + (ans, chk) + }); + + let _ = run_query!("q9", { + let ans = q9()?; + + let chk = (ans.column("r2")?.sum::().unwrap(),); + (ans, chk) + }); + 
+ let q10 = || { + df.clone() + .lazy() + .groupby(vec![ + col("id1"), + col("id2"), + col("id3"), + col("id4"), + col("id5"), + col("id6"), + ]) + .agg(vec![ + col("v3").sum().alias("v3"), + col("v1").count().alias("v1"), + ]) + .collect() + }; + let _ = run_query!("q10", { + let ans = q10()?; + + let checks = ans + .clone() + .lazy() + .select(vec![col("v3").sum(), col("v1").sum()]) + .collect()?; + let sum_v3 = checks + .select_at_idx(0) + .unwrap() + .cast::()? + .f32()? + .get(0) + .unwrap(); + let sum_v1 = checks + .select_at_idx(1) + .unwrap() + .cast::()? + .f32()? + .get(0) + .unwrap(); + let chk = (sum_v3, sum_v1); + + (ans, chk) + }); + + let _ = run_query!("q10", { + let ans = q10()?; + + let checks = ans + .clone() + .lazy() + .select(vec![col("v3").sum(), col("v1").sum()]) + .collect()?; + let sum_v3 = checks + .select_at_idx(0) + .unwrap() + .cast::()? + .f32()? + .get(0) + .unwrap(); + let sum_v1 = checks + .select_at_idx(1) + .unwrap() + .cast::()? + .f32()? + .get(0) + .unwrap(); + let chk = (sum_v3, sum_v1); + + (ans, chk) + }); + + Ok(()) +} diff --git a/polars/join/Cargo.toml b/polars/join/Cargo.toml new file mode 100644 index 00000000..81b1b305 --- /dev/null +++ b/polars/join/Cargo.toml @@ -0,0 +1,15 @@ +[package] +name = "polars-join" +version = "0.1.0" +authors = ["ritchie46 "] +edition = "2018" + +# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html + +[dependencies] +polars = {path= "/home/ritchie46/code/polars/polars", features = ["simd"]} +snmalloc-rs = {version = "0.2", features= ["cache-friendly"]} + +[profile.release] +lto = true +codegen-units = 1 diff --git a/polars/join/exec.sh b/polars/join/exec.sh new file mode 100755 index 00000000..40f8ec48 --- /dev/null +++ b/polars/join/exec.sh @@ -0,0 +1,17 @@ +#!/bin/bash +set -e + +# code snippet from _helpers/ used to determine right join table names. 
+data_name=${SRC_DATANAME} +x_n="$(echo $data_name | cut -d '_' -f 2)" +x_n_lhs="$(echo $x_n | cut -d 'e' -f 1)" +if [ "$x_n_lhs" -ne 1 ]; then + echo "data_name $data_name must have '1' base in exponential notation for number of rows" >&2 && exit 1 +fi +x_n_rhs="$(echo $x_n | cut -d "e" -f 2)" +if [ "$x_n_rhs" -lt 6 ]; then + echo "data_name $data_name must have exponent greater or equal to '6' in exponential notation for number of rows" >&2 && exit 1 +fi + +RUSTFLAGS='-C target-cpu=native' cargo run +nightly --release ${data_name/NA/"$x_n_lhs"e"$(($x_n_rhs-6))"} ${data_name/NA/"$x_n_lhs"e"$(($x_n_rhs-3))"} ${data_name/NA/"$x_n_lhs"e"$x_n_rhs"} + diff --git a/polars/join/src/main.rs b/polars/join/src/main.rs new file mode 100644 index 00000000..cc1fa702 --- /dev/null +++ b/polars/join/src/main.rs @@ -0,0 +1,233 @@ +use polars::prelude::*; +use polars::toggle_string_cache; +use std::time::Instant; + +#[global_allocator] +static ALLOC: snmalloc_rs::SnMalloc = snmalloc_rs::SnMalloc; + +macro_rules! run_query { + ($name: expr, $query: block) => {{ + let t = Instant::now(); + let (ans, chk) = $query; + println!("{} took {} ms", $name, t.elapsed().as_millis()); + println!("ans = {:?}\n chk = {:?}", ans, chk); + (ans, chk) + }}; +} + +fn main() -> Result<()> { + toggle_string_cache(true); + + // join tables should be passed as arguments + let args: Vec = std::env::args() + .map(|s| format!("../data/{}.csv", s)) + .collect(); + dbg!(&args); + + let path = format!("../data/{}.csv", std::env::var("SRC_DATANAME").unwrap()); + + let overwrite_schema = Schema::new(vec![ + Field::new("id1", DataType::Int32), + Field::new("id2", DataType::Int32), + Field::new("id3", DataType::Int32), + Field::new("v1", DataType::Float64), + ]); + let mut x = CsvReader::from_path(&path)? 
+ .with_dtype_overwrite(Some(&overwrite_schema)) + .finish()?; + + x.may_apply("id4", |s| s.cast::())?; + x.may_apply("id5", |s| s.cast::())?; + x.may_apply("id6", |s| s.cast::())?; + + let overwrite_schema = Schema::new(vec![ + Field::new("id1", DataType::Int32), + Field::new("v2", DataType::Float64), + ]); + let mut small = CsvReader::from_path(&args[1])? + .with_dtype_overwrite(Some(&overwrite_schema)) + .finish()?; + small.may_apply("id4", |s| s.cast::())?; + + let overwrite_schema = Schema::new(vec![ + Field::new("id1", DataType::Int32), + Field::new("id2", DataType::Int32), + Field::new("v2", DataType::Float64), + ]); + let mut medium = CsvReader::from_path(&args[2])? + .with_dtype_overwrite(Some(&overwrite_schema)) + .finish()?; + medium.may_apply("id4", |s| s.cast::())?; + medium.may_apply("id5", |s| s.cast::())?; + + let overwrite_schema = Schema::new(vec![ + Field::new("id1", DataType::Int32), + Field::new("id2", DataType::Int32), + Field::new("id3", DataType::Int32), + Field::new("v2", DataType::Float64), + ]); + let mut big = CsvReader::from_path(&args[3])? + .with_dtype_overwrite(Some(&overwrite_schema)) + .finish()?; + big.may_apply("id4", |s| s.cast::())?; + big.may_apply("id5", |s| s.cast::())?; + big.may_apply("id6", |s| s.cast::())?; + + // clear string cache from memory + toggle_string_cache(false); + + dbg!( + x.height(), + small.height(), + medium.height(), + big.height(), + "joining..." + ); + + for _ in 0..2 { + let _ = run_query!("q1", { + let ans = x.inner_join(&small, "id1", "id1")?; + + let checks = ans + .clone() + .lazy() + .select(vec![col("v1").sum(), col("v2").sum()]) + .collect()?; + let sum_v1 = checks + .select_at_idx(0) + .unwrap() + .cast::()? + .f32()? + .get(0) + .unwrap(); + let sum_v2 = checks + .select_at_idx(1) + .unwrap() + .cast::()? + .f32()? 
+ .get(0) + .unwrap(); + + let chk = (sum_v1, sum_v2); + (ans, chk) + }); + } + + for _ in 0..2 { + let _ = run_query!("q2", { + let ans = x.inner_join(&medium, "id2", "id2")?; + + let checks = ans + .clone() + .lazy() + .select(vec![col("v1").sum(), col("v2").sum()]) + .collect()?; + let sum_v1 = checks + .select_at_idx(0) + .unwrap() + .cast::()? + .f32()? + .get(0) + .unwrap(); + let sum_v2 = checks + .select_at_idx(1) + .unwrap() + .cast::()? + .f32()? + .get(0) + .unwrap(); + + let chk = (sum_v1, sum_v2); + (ans, chk) + }); + } + + for _ in 0..2 { + let _ = run_query!("q3", { + let ans = x.left_join(&medium, "id2", "id2")?; + + let checks = ans + .clone() + .lazy() + .select(vec![col("v1").sum(), col("v2").sum()]) + .collect()?; + let sum_v1 = checks + .select_at_idx(0) + .unwrap() + .cast::()? + .f32()? + .get(0) + .unwrap(); + let sum_v2 = checks + .select_at_idx(1) + .unwrap() + .cast::()? + .f32()? + .get(0) + .unwrap(); + + let chk = (sum_v1, sum_v2); + (ans, chk) + }); + } + + for _ in 0..2 { + let _ = run_query!("q4", { + let ans = x.left_join(&medium, "id5", "id5")?; + + let checks = ans + .clone() + .lazy() + .select(vec![col("v1").sum(), col("v2").sum()]) + .collect()?; + let sum_v1 = checks + .select_at_idx(0) + .unwrap() + .cast::()? + .f32()? + .get(0) + .unwrap(); + let sum_v2 = checks + .select_at_idx(1) + .unwrap() + .cast::()? + .f32()? + .get(0) + .unwrap(); + + let chk = (sum_v1, sum_v2); + (ans, chk) + }); + } + + for _ in 0..2 { + let _ = run_query!("q6", { + let ans = x.inner_join(&medium, "id3", "id3")?; + + let checks = ans + .clone() + .lazy() + .select(vec![col("v1").sum(), col("v2").sum()]) + .collect()?; + let sum_v1 = checks + .select_at_idx(0) + .unwrap() + .cast::()? + .f32()? + .get(0) + .unwrap(); + let sum_v2 = checks + .select_at_idx(1) + .unwrap() + .cast::()? + .f32()? 
+ .get(0) + .unwrap(); + + let chk = (sum_v1, sum_v2); + (ans, chk) + }); + } + + Ok(()) +} diff --git a/polars/setup-polars.sh b/polars/setup-polars.sh index 53e15800..e71493d3 100644 --- a/polars/setup-polars.sh +++ b/polars/setup-polars.sh @@ -1,34 +1,4 @@ #!/bin/bash set -e -# install dependencies -sudo apt-get update -qq -sudo apt-get install -y python3.6-dev virtualenv - -virtualenv polars/py-polars --python=/usr/bin/python3.6 -source polars/py-polars/bin/activate - -python -m pip install --upgrade psutil py-polars - -# build -deactivate -./polars/upg-polars.sh - -# check -source polars/py-polars/bin/activate -python -import pypolars as pl -pl.__version__ -quit() -deactivate - -# fix: print(ans.head(3), flush=True): UnicodeEncodeError: 'ascii' codec can't encode characters in position 14-31: ordinal not in range(128) -vim polars/py-polars/bin/activate -#deactivate () { -# unset PYTHONIOENCODING -# ... -#} -#... -#PYTHONIOENCODING="utf-8" -#export PYTHONIOENCODING -#... +curl --proto '=https' --tlsv1.2 -sSf https://sh.rustup.rs | sh -s -- -y diff --git a/polars/upg-polars.sh b/polars/upg-polars.sh index 81d2d107..54e2d503 100755 --- a/polars/upg-polars.sh +++ b/polars/upg-polars.sh @@ -1,8 +1,4 @@ #!/bin/bash set -e -echo 'upgrading polars...' - -source ./polars/py-polars/bin/activate - -python -m pip install --upgrade py-polars > /dev/null +cd groupby && cargo update && cd .. diff --git a/polars/ver-polars.sh b/polars/ver-polars.sh index ad7522ec..8e18f22d 100755 --- a/polars/ver-polars.sh +++ b/polars/ver-polars.sh @@ -1,5 +1 @@ -#!/bin/bash -set -e - -source ./polars/py-polars/bin/activate -python -c 'import pypolars as pl; open("polars/VERSION","w").write(pl.__version__); open("polars/REVISION","w").write("");' > /dev/null +cd groupby && cargo tree | grep "├── polars" | cut -d ' ' -f3 && cd .. 
diff --git a/polars/groupby-polars.py b/py-polars/groupby-polars.py similarity index 100% rename from polars/groupby-polars.py rename to py-polars/groupby-polars.py diff --git a/polars/join-polars.py b/py-polars/join-polars.py similarity index 100% rename from polars/join-polars.py rename to py-polars/join-polars.py diff --git a/py-polars/setup-polars.sh b/py-polars/setup-polars.sh new file mode 100644 index 00000000..53e15800 --- /dev/null +++ b/py-polars/setup-polars.sh @@ -0,0 +1,34 @@ +#!/bin/bash +set -e + +# install dependencies +sudo apt-get update -qq +sudo apt-get install -y python3.6-dev virtualenv + +virtualenv polars/py-polars --python=/usr/bin/python3.6 +source polars/py-polars/bin/activate + +python -m pip install --upgrade psutil py-polars + +# build +deactivate +./polars/upg-polars.sh + +# check +source polars/py-polars/bin/activate +python +import pypolars as pl +pl.__version__ +quit() +deactivate + +# fix: print(ans.head(3), flush=True): UnicodeEncodeError: 'ascii' codec can't encode characters in position 14-31: ordinal not in range(128) +vim polars/py-polars/bin/activate +#deactivate () { +# unset PYTHONIOENCODING +# ... +#} +#... +#PYTHONIOENCODING="utf-8" +#export PYTHONIOENCODING +#... diff --git a/py-polars/upg-polars.sh b/py-polars/upg-polars.sh new file mode 100755 index 00000000..81d2d107 --- /dev/null +++ b/py-polars/upg-polars.sh @@ -0,0 +1,8 @@ +#!/bin/bash +set -e + +echo 'upgrading polars...' + +source ./polars/py-polars/bin/activate + +python -m pip install --upgrade py-polars > /dev/null diff --git a/py-polars/ver-polars.sh b/py-polars/ver-polars.sh new file mode 100755 index 00000000..ad7522ec --- /dev/null +++ b/py-polars/ver-polars.sh @@ -0,0 +1,5 @@ +#!/bin/bash +set -e + +source ./polars/py-polars/bin/activate +python -c 'import pypolars as pl; open("polars/VERSION","w").write(pl.__version__); open("polars/REVISION","w").write("");' > /dev/null