Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Adjust visibility of crate-private members & functions #537

Merged
merged 3 commits into from
Dec 3, 2023
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
10 changes: 5 additions & 5 deletions src/catalog.rs
Original file line number Diff line number Diff line change
Expand Up @@ -30,18 +30,18 @@ use datafusion::{
};

#[pyclass(name = "Catalog", module = "datafusion", subclass)]
pub(crate) struct PyCatalog {
catalog: Arc<dyn CatalogProvider>,
pub struct PyCatalog {
pub catalog: Arc<dyn CatalogProvider>,
}

#[pyclass(name = "Database", module = "datafusion", subclass)]
pub(crate) struct PyDatabase {
database: Arc<dyn SchemaProvider>,
pub struct PyDatabase {
pub database: Arc<dyn SchemaProvider>,
}

#[pyclass(name = "Table", module = "datafusion", subclass)]
pub struct PyTable {
table: Arc<dyn TableProvider>,
pub table: Arc<dyn TableProvider>,
}

impl PyCatalog {
Expand Down
2 changes: 1 addition & 1 deletion src/common/df_field.rs
Original file line number Diff line number Diff line change
Expand Up @@ -27,7 +27,7 @@ use super::data_type::PyDataType;
#[pyclass(name = "DFField", module = "datafusion.common", subclass)]
#[derive(Debug, Clone)]
pub struct PyDFField {
field: DFField,
pub field: DFField,
}

impl From<PyDFField> for DFField {
Expand Down
77 changes: 40 additions & 37 deletions src/context.rs
Original file line number Diff line number Diff line change
Expand Up @@ -60,8 +60,8 @@
/// Configuration options for a SessionContext
#[pyclass(name = "SessionConfig", module = "datafusion", subclass)]
#[derive(Clone, Default)]
pub(crate) struct PySessionConfig {
pub(crate) config: SessionConfig,
pub struct PySessionConfig {
pub config: SessionConfig,
}

impl From<SessionConfig> for PySessionConfig {
Expand Down Expand Up @@ -153,8 +153,8 @@
/// Runtime options for a SessionContext
#[pyclass(name = "RuntimeConfig", module = "datafusion", subclass)]
#[derive(Clone)]
pub(crate) struct PyRuntimeConfig {
pub(crate) config: RuntimeConfig,
pub struct PyRuntimeConfig {
pub config: RuntimeConfig,
}

#[pymethods]
Expand Down Expand Up @@ -215,15 +215,18 @@
/// multi-threaded execution engine to perform the execution.
#[pyclass(name = "SessionContext", module = "datafusion", subclass)]
#[derive(Clone)]
pub(crate) struct PySessionContext {
pub(crate) ctx: SessionContext,
pub struct PySessionContext {
pub ctx: SessionContext,
}

#[pymethods]
impl PySessionContext {
#[pyo3(signature = (config=None, runtime=None))]
#[new]
fn new(config: Option<PySessionConfig>, runtime: Option<PyRuntimeConfig>) -> PyResult<Self> {
pub fn new(
config: Option<PySessionConfig>,
runtime: Option<PyRuntimeConfig>,
) -> PyResult<Self> {
let config = if let Some(c) = config {
c.config
} else {
Expand All @@ -242,7 +245,7 @@
}

/// Register an object store with the given name
fn register_object_store(
pub fn register_object_store(
&mut self,
scheme: &str,
store: &PyAny,
Expand Down Expand Up @@ -276,13 +279,13 @@
}

/// Returns a PyDataFrame whose plan corresponds to the SQL statement.
fn sql(&mut self, query: &str, py: Python) -> PyResult<PyDataFrame> {
pub fn sql(&mut self, query: &str, py: Python) -> PyResult<PyDataFrame> {

Check warning on line 282 in src/context.rs

View workflow job for this annotation

GitHub Actions / test-matrix (3.10, stable)

type `dataframe::PyDataFrame` is more private than the item `context::PySessionContext::sql`
let result = self.ctx.sql(query);
let df = wait_for_future(py, result).map_err(DataFusionError::from)?;
Ok(PyDataFrame::new(df))
}

fn create_dataframe(
pub fn create_dataframe(

Check warning on line 288 in src/context.rs

View workflow job for this annotation

GitHub Actions / test-matrix (3.10, stable)

type `dataframe::PyDataFrame` is more private than the item `context::PySessionContext::create_dataframe`
&mut self,
partitions: PyArrowType<Vec<Vec<RecordBatch>>>,
name: Option<&str>,
Expand Down Expand Up @@ -314,13 +317,13 @@
}

/// Create a DataFrame from an existing logical plan
fn create_dataframe_from_logical_plan(&mut self, plan: PyLogicalPlan) -> PyDataFrame {
pub fn create_dataframe_from_logical_plan(&mut self, plan: PyLogicalPlan) -> PyDataFrame {

Check warning on line 320 in src/context.rs

View workflow job for this annotation

GitHub Actions / test-matrix (3.10, stable)

type `dataframe::PyDataFrame` is more private than the item `context::PySessionContext::create_dataframe_from_logical_plan`
PyDataFrame::new(DataFrame::new(self.ctx.state(), plan.plan.as_ref().clone()))
}

/// Construct datafusion dataframe from Python list
#[allow(clippy::wrong_self_convention)]
fn from_pylist(
pub fn from_pylist(

Check warning on line 326 in src/context.rs

View workflow job for this annotation

GitHub Actions / test-matrix (3.10, stable)

type `dataframe::PyDataFrame` is more private than the item `context::PySessionContext::from_pylist`
&mut self,
data: PyObject,
name: Option<&str>,
Expand All @@ -340,7 +343,7 @@

/// Construct datafusion dataframe from Python dictionary
#[allow(clippy::wrong_self_convention)]
fn from_pydict(
pub fn from_pydict(

Check warning on line 346 in src/context.rs

View workflow job for this annotation

GitHub Actions / test-matrix (3.10, stable)

type `dataframe::PyDataFrame` is more private than the item `context::PySessionContext::from_pydict`
&mut self,
data: PyObject,
name: Option<&str>,
Expand All @@ -360,7 +363,7 @@

/// Construct datafusion dataframe from Arrow Table
#[allow(clippy::wrong_self_convention)]
fn from_arrow_table(
pub fn from_arrow_table(

Check warning on line 366 in src/context.rs

View workflow job for this annotation

GitHub Actions / test-matrix (3.10, stable)

type `dataframe::PyDataFrame` is more private than the item `context::PySessionContext::from_arrow_table`
&mut self,
data: PyObject,
name: Option<&str>,
Expand All @@ -381,7 +384,7 @@

/// Construct datafusion dataframe from pandas
#[allow(clippy::wrong_self_convention)]
fn from_pandas(
pub fn from_pandas(

Check warning on line 387 in src/context.rs

View workflow job for this annotation

GitHub Actions / test-matrix (3.10, stable)

type `dataframe::PyDataFrame` is more private than the item `context::PySessionContext::from_pandas`
&mut self,
data: PyObject,
name: Option<&str>,
Expand All @@ -401,7 +404,7 @@

/// Construct datafusion dataframe from polars
#[allow(clippy::wrong_self_convention)]
fn from_polars(
pub fn from_polars(

Check warning on line 407 in src/context.rs

View workflow job for this annotation

GitHub Actions / test-matrix (3.10, stable)

type `dataframe::PyDataFrame` is more private than the item `context::PySessionContext::from_polars`
&mut self,
data: PyObject,
name: Option<&str>,
Expand All @@ -417,21 +420,21 @@
})
}

fn register_table(&mut self, name: &str, table: &PyTable) -> PyResult<()> {
pub fn register_table(&mut self, name: &str, table: &PyTable) -> PyResult<()> {
self.ctx
.register_table(name, table.table())
.map_err(DataFusionError::from)?;
Ok(())
}

fn deregister_table(&mut self, name: &str) -> PyResult<()> {
pub fn deregister_table(&mut self, name: &str) -> PyResult<()> {
self.ctx
.deregister_table(name)
.map_err(DataFusionError::from)?;
Ok(())
}

fn register_record_batches(
pub fn register_record_batches(
&mut self,
name: &str,
partitions: PyArrowType<Vec<Vec<RecordBatch>>>,
Expand All @@ -451,7 +454,7 @@
skip_metadata=true,
schema=None,
file_sort_order=None))]
fn register_parquet(
pub fn register_parquet(
&mut self,
name: &str,
path: &str,
Expand Down Expand Up @@ -489,7 +492,7 @@
schema_infer_max_records=1000,
file_extension=".csv",
file_compression_type=None))]
fn register_csv(
pub fn register_csv(
&mut self,
name: &str,
path: PathBuf,
Expand Down Expand Up @@ -533,7 +536,7 @@
file_extension=".json",
table_partition_cols=vec![],
file_compression_type=None))]
fn register_json(
pub fn register_json(
&mut self,
name: &str,
path: PathBuf,
Expand Down Expand Up @@ -568,7 +571,7 @@
file_extension=".avro",
table_partition_cols=vec![],
infinite=false))]
fn register_avro(
pub fn register_avro(
&mut self,
name: &str,
path: PathBuf,
Expand All @@ -595,7 +598,7 @@
}

// Registers a PyArrow.Dataset
fn register_dataset(&self, name: &str, dataset: &PyAny, py: Python) -> PyResult<()> {
pub fn register_dataset(&self, name: &str, dataset: &PyAny, py: Python) -> PyResult<()> {
let table: Arc<dyn TableProvider> = Arc::new(Dataset::new(dataset, py)?);

self.ctx
Expand All @@ -605,18 +608,18 @@
Ok(())
}

fn register_udf(&mut self, udf: PyScalarUDF) -> PyResult<()> {
pub fn register_udf(&mut self, udf: PyScalarUDF) -> PyResult<()> {
self.ctx.register_udf(udf.function);
Ok(())
}

fn register_udaf(&mut self, udaf: PyAggregateUDF) -> PyResult<()> {
pub fn register_udaf(&mut self, udaf: PyAggregateUDF) -> PyResult<()> {
self.ctx.register_udaf(udaf.function);
Ok(())
}

#[pyo3(signature = (name="datafusion"))]
fn catalog(&self, name: &str) -> PyResult<PyCatalog> {
pub fn catalog(&self, name: &str) -> PyResult<PyCatalog> {
match self.ctx.catalog(name) {
Some(catalog) => Ok(PyCatalog::new(catalog)),
None => Err(PyKeyError::new_err(format!(
Expand All @@ -626,31 +629,31 @@
}
}

fn tables(&self) -> HashSet<String> {
pub fn tables(&self) -> HashSet<String> {
#[allow(deprecated)]
self.ctx.tables().unwrap()
}

fn table(&self, name: &str, py: Python) -> PyResult<PyDataFrame> {
pub fn table(&self, name: &str, py: Python) -> PyResult<PyDataFrame> {

Check warning on line 637 in src/context.rs

View workflow job for this annotation

GitHub Actions / test-matrix (3.10, stable)

type `dataframe::PyDataFrame` is more private than the item `context::PySessionContext::table`
let x = wait_for_future(py, self.ctx.table(name)).map_err(DataFusionError::from)?;
Ok(PyDataFrame::new(x))
}

fn table_exist(&self, name: &str) -> PyResult<bool> {
pub fn table_exist(&self, name: &str) -> PyResult<bool> {
Ok(self.ctx.table_exist(name)?)
}

fn empty_table(&self) -> PyResult<PyDataFrame> {
pub fn empty_table(&self) -> PyResult<PyDataFrame> {

Check warning on line 646 in src/context.rs

View workflow job for this annotation

GitHub Actions / test-matrix (3.10, stable)

type `dataframe::PyDataFrame` is more private than the item `context::PySessionContext::empty_table`
Ok(PyDataFrame::new(self.ctx.read_empty()?))
}

fn session_id(&self) -> String {
pub fn session_id(&self) -> String {
self.ctx.session_id()
}

#[allow(clippy::too_many_arguments)]
#[pyo3(signature = (path, schema=None, schema_infer_max_records=1000, file_extension=".json", table_partition_cols=vec![], file_compression_type=None))]
fn read_json(
pub fn read_json(
&mut self,
path: PathBuf,
schema: Option<PyArrowType<Schema>>,
Expand Down Expand Up @@ -689,7 +692,7 @@
file_extension=".csv",
table_partition_cols=vec![],
file_compression_type=None))]
fn read_csv(
pub fn read_csv(
&self,
path: PathBuf,
schema: Option<PyArrowType<Schema>>,
Expand Down Expand Up @@ -741,7 +744,7 @@
skip_metadata=true,
schema=None,
file_sort_order=None))]
fn read_parquet(
pub fn read_parquet(
&self,
path: &str,
table_partition_cols: Vec<(String, String)>,
Expand Down Expand Up @@ -771,7 +774,7 @@

#[allow(clippy::too_many_arguments)]
#[pyo3(signature = (path, schema=None, table_partition_cols=vec![], file_extension=".avro"))]
fn read_avro(
pub fn read_avro(
&self,
path: &str,
schema: Option<PyArrowType<Schema>>,
Expand All @@ -793,7 +796,7 @@
Ok(PyDataFrame::new(df))
}

fn read_table(&self, table: &PyTable) -> PyResult<PyDataFrame> {
pub fn read_table(&self, table: &PyTable) -> PyResult<PyDataFrame> {
let df = self
.ctx
.read_table(table.table())
Expand Down
1 change: 1 addition & 0 deletions src/expr.rs
Original file line number Diff line number Diff line change
Expand Up @@ -128,6 +128,7 @@ impl PyExpr {
Expr::ScalarVariable(data_type, variables) => {
Ok(PyScalarVariable::new(data_type, variables).into_py(py))
}
Expr::Like(value) => Ok(PyLike::from(value.clone()).into_py(py)),
Expr::Literal(value) => Ok(PyLiteral::from(value.clone()).into_py(py)),
Expr::BinaryExpr(expr) => Ok(PyBinaryExpr::from(expr.clone()).into_py(py)),
Expr::Not(expr) => Ok(PyNot::new(*expr.clone()).into_py(py)),
Expand Down
2 changes: 1 addition & 1 deletion src/lib.rs
Original file line number Diff line number Diff line change
Expand Up @@ -35,7 +35,7 @@ pub mod common;
#[allow(clippy::borrow_deref_ref)]
mod config;
#[allow(clippy::borrow_deref_ref)]
mod context;
pub mod context;
#[allow(clippy::borrow_deref_ref)]
mod dataframe;
mod dataset;
Expand Down
12 changes: 6 additions & 6 deletions src/substrait.rs
Original file line number Diff line number Diff line change
Expand Up @@ -29,8 +29,8 @@ use prost::Message;

#[pyclass(name = "plan", module = "datafusion.substrait", subclass)]
#[derive(Debug, Clone)]
pub(crate) struct PyPlan {
pub(crate) plan: Plan,
pub struct PyPlan {
pub plan: Plan,
}

#[pymethods]
Expand Down Expand Up @@ -61,7 +61,7 @@ impl From<Plan> for PyPlan {
/// to a valid `LogicalPlan` instance.
#[pyclass(name = "serde", module = "datafusion.substrait", subclass)]
#[derive(Debug, Clone)]
pub(crate) struct PySubstraitSerializer;
pub struct PySubstraitSerializer;

#[pymethods]
impl PySubstraitSerializer {
Expand Down Expand Up @@ -107,7 +107,7 @@ impl PySubstraitSerializer {

#[pyclass(name = "producer", module = "datafusion.substrait", subclass)]
#[derive(Debug, Clone)]
pub(crate) struct PySubstraitProducer;
pub struct PySubstraitProducer;

#[pymethods]
impl PySubstraitProducer {
Expand All @@ -123,7 +123,7 @@ impl PySubstraitProducer {

#[pyclass(name = "consumer", module = "datafusion.substrait", subclass)]
#[derive(Debug, Clone)]
pub(crate) struct PySubstraitConsumer;
pub struct PySubstraitConsumer;

#[pymethods]
impl PySubstraitConsumer {
Expand All @@ -140,7 +140,7 @@ impl PySubstraitConsumer {
}
}

pub(crate) fn init_module(m: &PyModule) -> PyResult<()> {
pub fn init_module(m: &PyModule) -> PyResult<()> {
m.add_class::<PyPlan>()?;
m.add_class::<PySubstraitConsumer>()?;
m.add_class::<PySubstraitProducer>()?;
Expand Down
Loading
Loading