Skip to content

Commit 0dd3225

Browse files
committed
Add source root
1 parent 0cf3627 commit 0dd3225

File tree

13 files changed

+39
-32
lines changed

13 files changed

+39
-32
lines changed

src/catalog.rs

Lines changed: 3 additions & 3 deletions
Original file line number | Diff line number | Diff line change
@@ -36,19 +36,19 @@ use std::any::Any;
3636
use std::collections::HashSet;
3737
use std::sync::Arc;
3838

39-
#[pyclass(name = "RawCatalog", module = "datafusion.catalog", subclass)]
39+
#[pyclass(frozen, name = "RawCatalog", module = "datafusion.catalog", subclass)]
4040
#[derive(Clone)]
4141
pub struct PyCatalog {
4242
pub catalog: Arc<dyn CatalogProvider>,
4343
}
4444

45-
#[pyclass(name = "RawSchema", module = "datafusion.catalog", subclass)]
45+
#[pyclass(frozen, name = "RawSchema", module = "datafusion.catalog", subclass)]
4646
#[derive(Clone)]
4747
pub struct PySchema {
4848
pub schema: Arc<dyn SchemaProvider>,
4949
}
5050

51-
#[pyclass(name = "RawTable", module = "datafusion.catalog", subclass)]
51+
#[pyclass(frozen, name = "RawTable", module = "datafusion.catalog", subclass)]
5252
#[derive(Clone)]
5353
pub struct PyTable {
5454
pub table: Arc<dyn TableProvider>,

src/config.rs

Lines changed: 4 additions & 3 deletions
Original file line number | Diff line number | Diff line change
@@ -23,6 +23,7 @@ use datafusion::config::ConfigOptions;
2323
use crate::errors::PyDataFusionResult;
2424
use crate::utils::py_obj_to_scalar_value;
2525

26+
// TODO: Not frozen because set needs access
2627
#[pyclass(name = "Config", module = "datafusion", subclass)]
2728
#[derive(Clone)]
2829
pub(crate) struct PyConfig {
@@ -47,7 +48,7 @@ impl PyConfig {
4748
}
4849

4950
/// Get a configuration option
50-
pub fn get<'py>(&mut self, key: &str, py: Python<'py>) -> PyResult<Bound<'py, PyAny>> {
51+
pub fn get<'py>(&self, key: &str, py: Python<'py>) -> PyResult<Bound<'py, PyAny>> {
5152
let options = self.config.to_owned();
5253
for entry in options.entries() {
5354
if entry.key == key {
@@ -65,7 +66,7 @@ impl PyConfig {
6566
}
6667

6768
/// Get all configuration options
68-
pub fn get_all(&mut self, py: Python) -> PyResult<PyObject> {
69+
pub fn get_all(&self, py: Python) -> PyResult<PyObject> {
6970
let dict = PyDict::new(py);
7071
let options = self.config.to_owned();
7172
for entry in options.entries() {
@@ -74,7 +75,7 @@ impl PyConfig {
7475
Ok(dict.into())
7576
}
7677

77-
fn __repr__(&mut self, py: Python) -> PyResult<String> {
78+
fn __repr__(&self, py: Python) -> PyResult<String> {
7879
let dict = self.get_all(py);
7980
match dict {
8081
Ok(result) => Ok(format!("Config({result})")),

src/context.rs

Lines changed: 3 additions & 3 deletions
Original file line number | Diff line number | Diff line change
@@ -77,7 +77,7 @@ use pyo3::IntoPyObjectExt;
7777
use tokio::task::JoinHandle;
7878

7979
/// Configuration options for a SessionContext
80-
#[pyclass(name = "SessionConfig", module = "datafusion", subclass)]
80+
#[pyclass(frozen, name = "SessionConfig", module = "datafusion", subclass)]
8181
#[derive(Clone, Default)]
8282
pub struct PySessionConfig {
8383
pub config: SessionConfig,
@@ -170,7 +170,7 @@ impl PySessionConfig {
170170
}
171171

172172
/// Runtime options for a SessionContext
173-
#[pyclass(name = "RuntimeEnvBuilder", module = "datafusion", subclass)]
173+
#[pyclass(frozen, name = "RuntimeEnvBuilder", module = "datafusion", subclass)]
174174
#[derive(Clone)]
175175
pub struct PyRuntimeEnvBuilder {
176176
pub builder: RuntimeEnvBuilder,
@@ -257,7 +257,7 @@ impl PyRuntimeEnvBuilder {
257257
}
258258

259259
/// `PySQLOptions` allows you to specify options to the sql execution.
260-
#[pyclass(name = "SQLOptions", module = "datafusion", subclass)]
260+
#[pyclass(frozen, name = "SQLOptions", module = "datafusion", subclass)]
261261
#[derive(Clone)]
262262
pub struct PySQLOptions {
263263
pub options: SQLOptions,

src/dataframe.rs

Lines changed: 4 additions & 3 deletions
Original file line number | Diff line number | Diff line change
@@ -61,7 +61,7 @@ use crate::{
6161
// https://github.com/apache/datafusion-python/pull/1016#discussion_r1983239116
6262
// - we have not decided on the table_provider approach yet
6363
// this is an interim implementation
64-
#[pyclass(name = "TableProvider", module = "datafusion")]
64+
#[pyclass(frozen, name = "TableProvider", module = "datafusion")]
6565
pub struct PyTableProvider {
6666
provider: Arc<dyn TableProvider + Send>,
6767
}
@@ -188,7 +188,7 @@ fn build_formatter_config_from_python(formatter: &Bound<'_, PyAny>) -> PyResult<
188188
}
189189

190190
/// Python mapping of `ParquetOptions` (includes just the writer-related options).
191-
#[pyclass(name = "ParquetWriterOptions", module = "datafusion", subclass)]
191+
#[pyclass(frozen, name = "ParquetWriterOptions", module = "datafusion", subclass)]
192192
#[derive(Clone, Default)]
193193
pub struct PyParquetWriterOptions {
194194
options: ParquetOptions,
@@ -249,7 +249,7 @@ impl PyParquetWriterOptions {
249249
}
250250

251251
/// Python mapping of `ParquetColumnOptions`.
252-
#[pyclass(name = "ParquetColumnOptions", module = "datafusion", subclass)]
252+
#[pyclass(frozen, name = "ParquetColumnOptions", module = "datafusion", subclass)]
253253
#[derive(Clone, Default)]
254254
pub struct PyParquetColumnOptions {
255255
options: ParquetColumnOptions,
@@ -284,6 +284,7 @@ impl PyParquetColumnOptions {
284284
/// A PyDataFrame is a representation of a logical plan and an API to compose statements.
285285
/// Use it to build a plan and `.collect()` to execute the plan and collect the result.
286286
/// The actual execution of a plan runs natively on Rust and Arrow on a multi-threaded environment.
287+
// TODO: Not frozen because batches don't currently handle interior mutability
287288
#[pyclass(name = "DataFrame", module = "datafusion", subclass)]
288289
#[derive(Clone)]
289290
pub struct PyDataFrame {

src/expr.rs

Lines changed: 2 additions & 2 deletions
Original file line number | Diff line number | Diff line change
@@ -115,7 +115,7 @@ pub mod window;
115115
use sort_expr::{to_sort_expressions, PySortExpr};
116116

117117
/// A PyExpr that can be used on a DataFrame
118-
#[pyclass(name = "RawExpr", module = "datafusion.expr", subclass)]
118+
#[pyclass(frozen, name = "RawExpr", module = "datafusion.expr", subclass)]
119119
#[derive(Debug, Clone)]
120120
pub struct PyExpr {
121121
pub expr: Expr,
@@ -637,7 +637,7 @@ impl PyExpr {
637637
}
638638
}
639639

640-
#[pyclass(name = "ExprFuncBuilder", module = "datafusion.expr", subclass)]
640+
#[pyclass(frozen, name = "ExprFuncBuilder", module = "datafusion.expr", subclass)]
641641
#[derive(Debug, Clone)]
642642
pub struct PyExprFuncBuilder {
643643
pub builder: ExprFuncBuilder,

src/physical_plan.rs

Lines changed: 1 addition & 1 deletion
Original file line number | Diff line number | Diff line change
@@ -24,7 +24,7 @@ use pyo3::{exceptions::PyRuntimeError, prelude::*, types::PyBytes};
2424

2525
use crate::{context::PySessionContext, errors::PyDataFusionResult};
2626

27-
#[pyclass(name = "ExecutionPlan", module = "datafusion", subclass)]
27+
#[pyclass(frozen, name = "ExecutionPlan", module = "datafusion", subclass)]
2828
#[derive(Debug, Clone)]
2929
pub struct PyExecutionPlan {
3030
pub plan: Arc<dyn ExecutionPlan>,

src/record_batch.rs

Lines changed: 4 additions & 4 deletions
Original file line number | Diff line number | Diff line change
@@ -28,7 +28,7 @@ use pyo3::prelude::*;
2828
use pyo3::{pyclass, pymethods, PyObject, PyResult, Python};
2929
use tokio::sync::Mutex;
3030

31-
#[pyclass(name = "RecordBatch", module = "datafusion", subclass)]
31+
#[pyclass(frozen, name = "RecordBatch", module = "datafusion", subclass)]
3232
pub struct PyRecordBatch {
3333
batch: RecordBatch,
3434
}
@@ -46,7 +46,7 @@ impl From<RecordBatch> for PyRecordBatch {
4646
}
4747
}
4848

49-
#[pyclass(name = "RecordBatchStream", module = "datafusion", subclass)]
49+
#[pyclass(frozen, name = "RecordBatchStream", module = "datafusion", subclass)]
5050
pub struct PyRecordBatchStream {
5151
stream: Arc<Mutex<SendableRecordBatchStream>>,
5252
}
@@ -61,12 +61,12 @@ impl PyRecordBatchStream {
6161

6262
#[pymethods]
6363
impl PyRecordBatchStream {
64-
fn next(&mut self, py: Python) -> PyResult<PyRecordBatch> {
64+
fn next(&self, py: Python) -> PyResult<PyRecordBatch> {
6565
let stream = self.stream.clone();
6666
wait_for_future(py, next_stream(stream, true))?
6767
}
6868

69-
fn __next__(&mut self, py: Python) -> PyResult<PyRecordBatch> {
69+
fn __next__(&self, py: Python) -> PyResult<PyRecordBatch> {
7070
self.next(py)
7171
}
7272

src/store.rs

Lines changed: 10 additions & 5 deletions
Original file line number | Diff line number | Diff line change
@@ -36,7 +36,12 @@ pub enum StorageContexts {
3636
HTTP(PyHttpContext),
3737
}
3838

39-
#[pyclass(name = "LocalFileSystem", module = "datafusion.store", subclass)]
39+
#[pyclass(
40+
frozen,
41+
name = "LocalFileSystem",
42+
module = "datafusion.store",
43+
subclass
44+
)]
4045
#[derive(Debug, Clone)]
4146
pub struct PyLocalFileSystemContext {
4247
pub inner: Arc<LocalFileSystem>,
@@ -62,7 +67,7 @@ impl PyLocalFileSystemContext {
6267
}
6368
}
6469

65-
#[pyclass(name = "MicrosoftAzure", module = "datafusion.store", subclass)]
70+
#[pyclass(frozen, name = "MicrosoftAzure", module = "datafusion.store", subclass)]
6671
#[derive(Debug, Clone)]
6772
pub struct PyMicrosoftAzureContext {
6873
pub inner: Arc<MicrosoftAzure>,
@@ -134,7 +139,7 @@ impl PyMicrosoftAzureContext {
134139
}
135140
}
136141

137-
#[pyclass(name = "GoogleCloud", module = "datafusion.store", subclass)]
142+
#[pyclass(frozen, name = "GoogleCloud", module = "datafusion.store", subclass)]
138143
#[derive(Debug, Clone)]
139144
pub struct PyGoogleCloudContext {
140145
pub inner: Arc<GoogleCloudStorage>,
@@ -164,7 +169,7 @@ impl PyGoogleCloudContext {
164169
}
165170
}
166171

167-
#[pyclass(name = "AmazonS3", module = "datafusion.store", subclass)]
172+
#[pyclass(frozen, name = "AmazonS3", module = "datafusion.store", subclass)]
168173
#[derive(Debug, Clone)]
169174
pub struct PyAmazonS3Context {
170175
pub inner: Arc<AmazonS3>,
@@ -223,7 +228,7 @@ impl PyAmazonS3Context {
223228
}
224229
}
225230

226-
#[pyclass(name = "Http", module = "datafusion.store", subclass)]
231+
#[pyclass(frozen, name = "Http", module = "datafusion.store", subclass)]
227232
#[derive(Debug, Clone)]
228233
pub struct PyHttpContext {
229234
pub url: String,

src/substrait.rs

Lines changed: 4 additions & 4 deletions
Original file line number | Diff line number | Diff line change
@@ -27,7 +27,7 @@ use datafusion_substrait::serializer;
2727
use datafusion_substrait::substrait::proto::Plan;
2828
use prost::Message;
2929

30-
#[pyclass(name = "Plan", module = "datafusion.substrait", subclass)]
30+
#[pyclass(frozen, name = "Plan", module = "datafusion.substrait", subclass)]
3131
#[derive(Debug, Clone)]
3232
pub struct PyPlan {
3333
pub plan: Plan,
@@ -59,7 +59,7 @@ impl From<Plan> for PyPlan {
5959
/// A PySubstraitSerializer is a representation of a Serializer that is capable of both serializing
6060
/// a `LogicalPlan` instance to Substrait Protobuf bytes and also deserialize Substrait Protobuf bytes
6161
/// to a valid `LogicalPlan` instance.
62-
#[pyclass(name = "Serde", module = "datafusion.substrait", subclass)]
62+
#[pyclass(frozen, name = "Serde", module = "datafusion.substrait", subclass)]
6363
#[derive(Debug, Clone)]
6464
pub struct PySubstraitSerializer;
6565

@@ -112,7 +112,7 @@ impl PySubstraitSerializer {
112112
}
113113
}
114114

115-
#[pyclass(name = "Producer", module = "datafusion.substrait", subclass)]
115+
#[pyclass(frozen, name = "Producer", module = "datafusion.substrait", subclass)]
116116
#[derive(Debug, Clone)]
117117
pub struct PySubstraitProducer;
118118

@@ -129,7 +129,7 @@ impl PySubstraitProducer {
129129
}
130130
}
131131

132-
#[pyclass(name = "Consumer", module = "datafusion.substrait", subclass)]
132+
#[pyclass(frozen, name = "Consumer", module = "datafusion.substrait", subclass)]
133133
#[derive(Debug, Clone)]
134134
pub struct PySubstraitConsumer;
135135

src/udaf.rs

Lines changed: 1 addition & 1 deletion
Original file line number | Diff line number | Diff line change
@@ -155,7 +155,7 @@ pub fn to_rust_accumulator(accum: PyObject) -> AccumulatorFactoryFunction {
155155
}
156156

157157
/// Represents an AggregateUDF
158-
#[pyclass(name = "AggregateUDF", module = "datafusion", subclass)]
158+
#[pyclass(frozen, name = "AggregateUDF", module = "datafusion", subclass)]
159159
#[derive(Debug, Clone)]
160160
pub struct PyAggregateUDF {
161161
pub(crate) function: AggregateUDF,

0 commit comments

Comments (0)