refactor(rust): Removed unnecessary flatten function (#19551)
orlp authored Nov 1, 2024
1 parent 0e52706 commit 992128d
Showing 3 changed files with 20 additions and 49 deletions.
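
The removed helper and its replacement do the same work: std's slice `concat` also sums the input lengths, reserves once, and copies each slice into a single `Vec`. A minimal standalone sketch of the equivalence, using made-up byte segments (the `flatten` body is the one deleted from `crates/polars-utils/src/functions.rs` below):

// Illustrative sketch: the deleted polars-utils helper vs. std's slice `concat`.
fn flatten<T: Clone, R: AsRef<[T]>>(bufs: &[R], len: Option<usize>) -> Vec<T> {
    let len = len.unwrap_or_else(|| bufs.iter().map(|b| b.as_ref().len()).sum());
    let mut out = Vec::with_capacity(len);
    for b in bufs {
        out.extend_from_slice(b.as_ref());
    }
    out
}

fn main() {
    // Made-up segments standing in for path_prefix / DATA_PREFIX / uri_hash.
    let segments: [&[u8]; 3] = [b"/cache", b"/d/", b"abc123"];

    // Old call shape: explicit helper with an optional pre-computed length.
    let via_flatten = flatten(&segments[..], None);
    // New call shape: `concat` on a slice of byte slices.
    let via_concat = segments.concat();

    assert_eq!(via_flatten, via_concat);
    assert_eq!(via_concat, b"/cache/d/abc123".to_vec());
}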
43 changes: 18 additions & 25 deletions crates/polars-io/src/file_cache/entry.rs
@@ -6,7 +6,6 @@ use std::sync::{Arc, Mutex};
 use fs4::fs_std::FileExt;
 use polars_core::config;
 use polars_error::{polars_bail, to_compute_err, PolarsError, PolarsResult};
-use polars_utils::flatten;
 
 use super::cache_lock::{self, GLOBAL_FILE_CACHE_LOCK};
 use super::file_fetcher::{FileFetcher, RemoteMetadata};
@@ -370,32 +369,26 @@ fn get_data_file_path(
     remote_version: &FileVersion,
 ) -> PathBuf {
     let owned;
-    let path = flatten(
-        &[
-            path_prefix,
-            &[b'/', DATA_PREFIX, b'/'],
-            uri_hash,
-            match remote_version {
-                FileVersion::Timestamp(v) => {
-                    owned = Some(format!("{:013x}", v));
-                    owned.as_deref().unwrap()
-                },
-                FileVersion::ETag(v) => v.as_str(),
-                FileVersion::Uninitialized => panic!("impl error: version not initialized"),
-            }
-            .as_bytes(),
-        ],
-        None,
-    );
-    PathBuf::from(std::str::from_utf8(&path).unwrap())
+    let path = [
+        path_prefix,
+        &[b'/', DATA_PREFIX, b'/'],
+        uri_hash,
+        match remote_version {
+            FileVersion::Timestamp(v) => {
+                owned = Some(format!("{:013x}", v));
+                owned.as_deref().unwrap()
+            },
+            FileVersion::ETag(v) => v.as_str(),
+            FileVersion::Uninitialized => panic!("impl error: version not initialized"),
+        }
+        .as_bytes(),
+    ]
+    .concat();
+    PathBuf::from(String::from_utf8(path).unwrap())
 }
 
 /// `[prefix]/m/[uri hash]`
 fn get_metadata_file_path(path_prefix: &[u8], uri_hash: &[u8]) -> PathBuf {
-    let bytes = flatten(
-        &[path_prefix, &[b'/', METADATA_PREFIX, b'/'], uri_hash],
-        None,
-    );
-    let s = std::str::from_utf8(bytes.as_slice()).unwrap();
-    PathBuf::from(s)
+    let bytes = [path_prefix, &[b'/', METADATA_PREFIX, b'/'], uri_hash].concat();
+    PathBuf::from(String::from_utf8(bytes).unwrap())
 }
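
A related detail in the new `entry.rs` code above: `String::from_utf8` takes the concatenated `Vec<u8>` by value, so `PathBuf::from(String)` can reuse that allocation, whereas the old code borrowed via `std::str::from_utf8(&path)` and copied the bytes into a fresh `PathBuf`. A small illustrative sketch of the two shapes (byte values are made up):

use std::path::PathBuf;

fn main() {
    // Made-up segments mirroring path_prefix / DATA_PREFIX / uri_hash in entry.rs.
    let segments: [&[u8]; 3] = [b"/cache", b"/d/", b"abc123"];
    let bytes: Vec<u8> = segments.concat();

    // Old shape: borrow the bytes as &str, then copy them into a new PathBuf.
    let copied = PathBuf::from(std::str::from_utf8(&bytes).unwrap());

    // New shape: validate and move the Vec into a String, then into the PathBuf.
    let moved = PathBuf::from(String::from_utf8(bytes).unwrap());

    assert_eq!(copied, moved);
}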
11 changes: 0 additions & 11 deletions crates/polars-utils/src/functions.rs
@@ -15,17 +15,6 @@ pub fn range_portion(i: usize, k: usize, r: Range<usize>) -> Range<usize> {
     r.start + num_before..r.start + num_before + our_size
 }
 
-// Faster than collecting from a flattened iterator.
-pub fn flatten<T: Clone, R: AsRef<[T]>>(bufs: &[R], len: Option<usize>) -> Vec<T> {
-    let len = len.unwrap_or_else(|| bufs.iter().map(|b| b.as_ref().len()).sum());
-
-    let mut out = Vec::with_capacity(len);
-    for b in bufs {
-        out.extend_from_slice(b.as_ref());
-    }
-    out
-}
-
 pub fn arc_map<T: Clone, F: FnMut(T) -> T>(mut arc: Arc<T>, mut f: F) -> Arc<T> {
     unsafe {
         // Make the Arc unique (cloning if necessary).
15 changes: 2 additions & 13 deletions crates/polars-utils/src/python_function.rs
@@ -8,8 +8,6 @@ pub use serde_wrap::{
     SERDE_MAGIC_BYTE_MARK as PYTHON_SERDE_MAGIC_BYTE_MARK,
 };
 
-use crate::flatten;
-
 #[derive(Clone, Debug)]
 pub struct PythonFunction(pub PyObject);
 
@@ -96,10 +94,7 @@ pub fn serialize_pyobject_with_cloudpickle_fallback(py_object: &PyObject) -> Pol
 
         let py_bytes = dumped.extract::<PyBackedBytes>().map_err(from_pyerr)?;
 
-        Ok(flatten(
-            &[&[used_cloudpickle as u8, b'C'][..], py_bytes.as_ref()],
-            None,
-        ))
+        Ok([&[used_cloudpickle as u8, b'C'][..], py_bytes.as_ref()].concat())
     })
 }
 
@@ -129,8 +124,6 @@ mod serde_wrap {
     use once_cell::sync::Lazy;
     use polars_error::PolarsResult;
 
-    use crate::flatten;
-
     pub const SERDE_MAGIC_BYTE_MARK: &[u8] = "PLPYFN".as_bytes();
     /// [minor, micro]
     pub static PYTHON3_VERSION: Lazy<[u8; 2]> = Lazy::new(super::get_python3_version);
@@ -158,11 +151,7 @@ mod serde_wrap {
                 .map_err(|e| S::Error::custom(e.to_string()))?;
 
             serializer.serialize_bytes(
-                flatten(
-                    &[SERDE_MAGIC_BYTE_MARK, &*PYTHON3_VERSION, dumped.as_slice()],
-                    None,
-                )
-                .as_slice(),
+                &[SERDE_MAGIC_BYTE_MARK, &*PYTHON3_VERSION, dumped.as_slice()].concat(),
             )
         }
     }
