Skip to content

Commit dd6b17d

Browse files
kiszkkou
authored and committed
ARROW-7479: [Rust][Ruby][R] Fix typos
This PR fixes typos in files under `rust`, `ruby`, and `r` directories Closes #6108 from kiszk/ARROW-7479 and squashes the following commits: 07098de <Kazuaki Ishizaki> address review comment 1c8927e <Kazuaki Ishizaki> address review comment 3b35733 <Kazuaki Ishizaki> fix typo a22296b <Kazuaki Ishizaki> address review comment 529a376 <Kazuaki Ishizaki> fix typos Authored-by: Kazuaki Ishizaki <[email protected]> Signed-off-by: Sutou Kouhei <[email protected]>
1 parent e24825a commit dd6b17d

File tree

21 files changed

+56
-56
lines changed

21 files changed

+56
-56
lines changed

r/R/csv.R

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -67,7 +67,7 @@
6767
#' @param as_data_frame Should the function return a `data.frame` or an
6868
#' [arrow::Table][Table]?
6969
#'
70-
#' @return A `data.frame`, or an Table if `as_data_frame = FALSE`.
70+
#' @return A `data.frame`, or a Table if `as_data_frame = FALSE`.
7171
#' @export
7272
#' @examples
7373
#' \donttest{

r/R/dplyr.R

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -21,7 +21,7 @@
2121
arrow_dplyr_query <- function(.data) {
2222
# An arrow_dplyr_query is a container for an Arrow data object (Table,
2323
# RecordBatch, or Dataset) and the state of the user's dplyr query--things
24-
# like selected colums, filters, and group vars.
24+
# like selected columns, filters, and group vars.
2525

2626
# For most dplyr methods,
2727
# method.Table == method.RecordBatch == method.Dataset == method.arrow_dplyr_query

r/R/parquet.R

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -60,7 +60,7 @@ read_parquet <- function(file,
6060
#' @param compression_level compression level. Meaning depends on compression algorithm
6161
#' @param use_dictionary Specify if we should use dictionary encoding. Default `TRUE`
6262
#' @param write_statistics Specify if we should write statistics. Default `TRUE`
63-
#' @param data_page_size Set a target threshhold for the approximate encoded
63+
#' @param data_page_size Set a target threshold for the approximate encoded
6464
#' size of data pages within a column chunk (in bytes). Default 1 MiB.
6565
#' @param properties properties for parquet writer, derived from arguments
6666
#' `version`, `compression`, `compression_level`, `use_dictionary`,
@@ -254,7 +254,7 @@ make_valid_version <- function(version, valid_versions = valid_parquet_version)
254254
#' - `compression_level`: Compression level; meaning depends on compression algorithm
255255
#' - `use_dictionary`: Specify if we should use dictionary encoding. Default `TRUE`
256256
#' - `write_statistics`: Specify if we should write statistics. Default `TRUE`
257-
#' - `data_page_size`: Set a target threshhold for the approximate encoded
257+
#' - `data_page_size`: Set a target threshold for the approximate encoded
258258
#' size of data pages within a column chunk (in bytes). Default 1 MiB.
259259
#'
260260
#' @details The parameters `compression`, `compression_level`, `use_dictionary`

r/man/ParquetWriterProperties.Rd

Lines changed: 1 addition & 1 deletion
Some generated files are not rendered by default. Learn more about customizing how changed files appear on GitHub.

r/man/read_delim_arrow.Rd

Lines changed: 1 addition & 1 deletion
Some generated files are not rendered by default. Learn more about customizing how changed files appear on GitHub.

r/man/write_parquet.Rd

Lines changed: 1 addition & 1 deletion
Some generated files are not rendered by default. Learn more about customizing how changed files appear on GitHub.

r/src/array_from_vector.cpp

Lines changed: 13 additions & 13 deletions
Original file line numberDiff line numberDiff line change
@@ -1002,7 +1002,7 @@ arrow::Status CheckCompatibleStruct(SEXP obj,
10021002
}
10031003

10041004
std::shared_ptr<arrow::Array> Array__from_vector(
1005-
SEXP x, const std::shared_ptr<arrow::DataType>& type, bool type_infered) {
1005+
SEXP x, const std::shared_ptr<arrow::DataType>& type, bool type_inferred) {
10061006
// short circuit if `x` is already an Array
10071007
if (Rf_inherits(x, "Array")) {
10081008
return Rcpp::ConstReferenceSmartPtrInputParameter<std::shared_ptr<arrow::Array>>(x);
@@ -1020,9 +1020,9 @@ std::shared_ptr<arrow::Array> Array__from_vector(
10201020
return arrow::r::MakeStringArray(x);
10211021
}
10221022

1023-
// factors only when type has been infered
1023+
// factors only when type has been inferred
10241024
if (type->id() == Type::DICTIONARY) {
1025-
if (type_infered || arrow::r::CheckCompatibleFactor(x, type)) {
1025+
if (type_inferred || arrow::r::CheckCompatibleFactor(x, type)) {
10261026
return arrow::r::MakeFactorArray(x, type);
10271027
}
10281028

@@ -1031,7 +1031,7 @@ std::shared_ptr<arrow::Array> Array__from_vector(
10311031

10321032
// struct types
10331033
if (type->id() == Type::STRUCT) {
1034-
if (!type_infered) {
1034+
if (!type_inferred) {
10351035
STOP_IF_NOT_OK(arrow::r::CheckCompatibleStruct(x, type));
10361036
}
10371037

@@ -1066,16 +1066,16 @@ std::shared_ptr<arrow::DataType> Array__infer_type(SEXP x) {
10661066
// [[arrow::export]]
10671067
std::shared_ptr<arrow::Array> Array__from_vector(SEXP x, SEXP s_type) {
10681068
// the type might be NULL, in which case we need to infer it from the data
1069-
// we keep track of whether it was infered or supplied
1070-
bool type_infered = Rf_isNull(s_type);
1069+
// we keep track of whether it was inferred or supplied
1070+
bool type_inferred = Rf_isNull(s_type);
10711071
std::shared_ptr<arrow::DataType> type;
1072-
if (type_infered) {
1072+
if (type_inferred) {
10731073
type = arrow::r::InferType(x);
10741074
} else {
10751075
type = arrow::r::extract<arrow::DataType>(s_type);
10761076
}
10771077

1078-
return arrow::r::Array__from_vector(x, type, type_infered);
1078+
return arrow::r::Array__from_vector(x, type, type_inferred);
10791079
}
10801080

10811081
// [[arrow::export]]
@@ -1084,12 +1084,12 @@ std::shared_ptr<arrow::ChunkedArray> ChunkedArray__from_list(Rcpp::List chunks,
10841084
std::vector<std::shared_ptr<arrow::Array>> vec;
10851085

10861086
// the type might be NULL, in which case we need to infer it from the data
1087-
// we keep track of whether it was infered or supplied
1088-
bool type_infered = Rf_isNull(s_type);
1087+
// we keep track of whether it was inferred or supplied
1088+
bool type_inferred = Rf_isNull(s_type);
10891089
R_xlen_t n = XLENGTH(chunks);
10901090

10911091
std::shared_ptr<arrow::DataType> type;
1092-
if (type_infered) {
1092+
if (type_inferred) {
10931093
if (n == 0) {
10941094
Rcpp::stop("type must be specified for empty list");
10951095
}
@@ -1106,11 +1106,11 @@ std::shared_ptr<arrow::ChunkedArray> ChunkedArray__from_list(Rcpp::List chunks,
11061106
vec.push_back(array);
11071107
} else {
11081108
// the first - might differ from the rest of the loop
1109-
// because we might have infered the type from the first element of the list
1109+
// because we might have inferred the type from the first element of the list
11101110
//
11111111
// this only really matters for dictionary arrays
11121112
vec.push_back(
1113-
arrow::r::Array__from_vector(VECTOR_ELT(chunks, 0), type, type_infered));
1113+
arrow::r::Array__from_vector(VECTOR_ELT(chunks, 0), type, type_inferred));
11141114

11151115
for (R_xlen_t i = 1; i < n; i++) {
11161116
vec.push_back(arrow::r::Array__from_vector(VECTOR_ELT(chunks, i), type, false));

r/src/arrow_types.h

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -253,7 +253,7 @@ namespace r {
253253
Status count_fields(SEXP lst, int* out);
254254

255255
std::shared_ptr<arrow::Array> Array__from_vector(
256-
SEXP x, const std::shared_ptr<arrow::DataType>& type, bool type_infered);
256+
SEXP x, const std::shared_ptr<arrow::DataType>& type, bool type_inferred);
257257

258258
template <typename T>
259259
std::vector<std::shared_ptr<T>> List_to_shared_ptr_vector(SEXP x) {

r/src/recordbatch.cpp

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -292,7 +292,7 @@ std::shared_ptr<arrow::RecordBatch> RecordBatch__from_arrays(SEXP schema_sxp, SE
292292
}
293293
}
294294

295-
// generate schema from the types that have been infered
295+
// generate schema from the types that have been inferred
296296
std::shared_ptr<arrow::Schema> schema;
297297

298298
std::vector<std::shared_ptr<arrow::Field>> fields(num_fields);

r/tests/testthat/test-Array.R

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -443,7 +443,7 @@ test_that("Array$create() handles data frame -> struct arrays (ARROW-3811)", {
443443
expect_equivalent(a$as_vector(), df)
444444
})
445445

446-
test_that("Array$create() can handle data frame with custom struct type (not infered)", {
446+
test_that("Array$create() can handle data frame with custom struct type (not inferred)", {
447447
df <- tibble::tibble(x = 1:10, y = 1:10)
448448
type <- struct(x = float64(), y = int16())
449449
a <- Array$create(df, type = type)

ruby/red-arrow/lib/arrow/field.rb

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -59,7 +59,7 @@ class Field
5959
# There is a shortcut for convenience. If field description
6060
# doesn't have `:data_type`, all keys except `:name` are
6161
# processes as data type description. For example, the
62-
# following field descrptions are the same:
62+
# following field descriptions are the same:
6363
#
6464
# ```ruby
6565
# {name: "visible", data_type: {type: :boolean}}

rust/arrow/src/array/array.rs

Lines changed: 4 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -811,7 +811,7 @@ pub struct ListArray {
811811
}
812812

813813
impl ListArray {
814-
/// Returns an reference to the values of this list.
814+
/// Returns a reference to the values of this list.
815815
pub fn values(&self) -> ArrayRef {
816816
self.values.clone()
817817
}
@@ -946,7 +946,7 @@ pub struct FixedSizeListArray {
946946
}
947947

948948
impl FixedSizeListArray {
949-
/// Returns an reference to the values of this list.
949+
/// Returns a reference to the values of this list.
950950
pub fn values(&self) -> ArrayRef {
951951
self.values.clone()
952952
}
@@ -1745,7 +1745,7 @@ mod tests {
17451745
#[test]
17461746
fn test_time64_nanosecond_array_from_vec() {
17471747
// Test building a primitive array with null values
1748-
// we use Int32 and Int64 as a backing array, so all Int32 and Int64 convensions
1748+
// we use Int32 and Int64 as a backing array, so all Int32 and Int64 conventions
17491749
// work
17501750

17511751
// 1e6: 00:00:00.001
@@ -1942,7 +1942,7 @@ mod tests {
19421942

19431943
#[test]
19441944
fn test_primitive_array_builder() {
1945-
// Test building an primitive array with ArrayData builder and offset
1945+
// Test building a primitive array with ArrayData builder and offset
19461946
let buf = Buffer::from(&[0, 1, 2, 3, 4].to_byte_slice());
19471947
let buf2 = buf.clone();
19481948
let data = ArrayData::builder(DataType::Int32)

rust/arrow/src/array/builder.rs

Lines changed: 16 additions & 16 deletions
Original file line numberDiff line numberDiff line change
@@ -215,14 +215,14 @@ pub trait ArrayBuilder: Any {
215215
/// Builds the array
216216
fn finish(&mut self) -> ArrayRef;
217217

218-
/// Returns the builder as an non-mutable `Any` reference.
218+
/// Returns the builder as a non-mutable `Any` reference.
219219
///
220220
/// This is most useful when one wants to call non-mutable APIs on a specific builder
221221
/// type. In this case, one can first cast this into a `Any`, and then use
222222
/// `downcast_ref` to get a reference on the specific builder.
223223
fn as_any(&self) -> &Any;
224224

225-
/// Returns the builder as an mutable `Any` reference.
225+
/// Returns the builder as a mutable `Any` reference.
226226
///
227227
/// This is most useful when one wants to call mutable APIs on a specific builder
228228
/// type. In this case, one can first cast this into a `Any`, and then use
@@ -240,12 +240,12 @@ pub struct PrimitiveBuilder<T: ArrowPrimitiveType> {
240240
}
241241

242242
impl<T: ArrowPrimitiveType> ArrayBuilder for PrimitiveBuilder<T> {
243-
/// Returns the builder as an non-mutable `Any` reference.
243+
/// Returns the builder as a non-mutable `Any` reference.
244244
fn as_any(&self) -> &Any {
245245
self
246246
}
247247

248-
/// Returns the builder as an mutable `Any` reference.
248+
/// Returns the builder as a mutable `Any` reference.
249249
fn as_any_mut(&mut self) -> &mut Any {
250250
self
251251
}
@@ -354,12 +354,12 @@ impl<T: ArrayBuilder> ArrayBuilder for ListBuilder<T>
354354
where
355355
T: 'static,
356356
{
357-
/// Returns the builder as an non-mutable `Any` reference.
357+
/// Returns the builder as a non-mutable `Any` reference.
358358
fn as_any(&self) -> &Any {
359359
self
360360
}
361361

362-
/// Returns the builder as an mutable `Any` reference.
362+
/// Returns the builder as a mutable `Any` reference.
363363
fn as_any_mut(&mut self) -> &mut Any {
364364
self
365365
}
@@ -455,12 +455,12 @@ impl<T: ArrayBuilder> ArrayBuilder for FixedSizeListBuilder<T>
455455
where
456456
T: 'static,
457457
{
458-
/// Returns the builder as an non-mutable `Any` reference.
458+
/// Returns the builder as a non-mutable `Any` reference.
459459
fn as_any(&self) -> &Any {
460460
self
461461
}
462462

463-
/// Returns the builder as an mutable `Any` reference.
463+
/// Returns the builder as a mutable `Any` reference.
464464
fn as_any_mut(&mut self) -> &mut Any {
465465
self
466466
}
@@ -559,12 +559,12 @@ impl BinaryArrayBuilder for StringBuilder {}
559559
impl BinaryArrayBuilder for FixedSizeBinaryBuilder {}
560560

561561
impl ArrayBuilder for BinaryBuilder {
562-
/// Returns the builder as an non-mutable `Any` reference.
562+
/// Returns the builder as a non-mutable `Any` reference.
563563
fn as_any(&self) -> &Any {
564564
self
565565
}
566566

567-
/// Returns the builder as an mutable `Any` reference.
567+
/// Returns the builder as a mutable `Any` reference.
568568
fn as_any_mut(&mut self) -> &mut Any {
569569
self
570570
}
@@ -586,12 +586,12 @@ impl ArrayBuilder for BinaryBuilder {
586586
}
587587

588588
impl ArrayBuilder for StringBuilder {
589-
/// Returns the builder as an non-mutable `Any` reference.
589+
/// Returns the builder as a non-mutable `Any` reference.
590590
fn as_any(&self) -> &Any {
591591
self
592592
}
593593

594-
/// Returns the builder as an mutable `Any` reference.
594+
/// Returns the builder as a mutable `Any` reference.
595595
fn as_any_mut(&mut self) -> &mut Any {
596596
self
597597
}
@@ -613,12 +613,12 @@ impl ArrayBuilder for StringBuilder {
613613
}
614614

615615
impl ArrayBuilder for FixedSizeBinaryBuilder {
616-
/// Returns the builder as an non-mutable `Any` reference.
616+
/// Returns the builder as a non-mutable `Any` reference.
617617
fn as_any(&self) -> &Any {
618618
self
619619
}
620620

621-
/// Returns the builder as an mutable `Any` reference.
621+
/// Returns the builder as a mutable `Any` reference.
622622
fn as_any_mut(&mut self) -> &mut Any {
623623
self
624624
}
@@ -784,7 +784,7 @@ impl ArrayBuilder for StructBuilder {
784784
Arc::new(self.finish())
785785
}
786786

787-
/// Returns the builder as an non-mutable `Any` reference.
787+
/// Returns the builder as a non-mutable `Any` reference.
788788
///
789789
/// This is most useful when one wants to call non-mutable APIs on a specific builder
790790
/// type. In this case, one can first cast this into a `Any`, and then use
@@ -793,7 +793,7 @@ impl ArrayBuilder for StructBuilder {
793793
self
794794
}
795795

796-
/// Returns the builder as an mutable `Any` reference.
796+
/// Returns the builder as a mutable `Any` reference.
797797
///
798798
/// This is most useful when one wants to call mutable APIs on a specific builder
799799
/// type. In this case, one can first cast this into a `Any`, and then use

rust/arrow/src/ipc/gen/SparseTensor.rs

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -89,7 +89,7 @@ pub enum SparseTensorIndexCOOOffset {}
8989

9090
/// ----------------------------------------------------------------------
9191
/// EXPERIMENTAL: Data structures for sparse tensors
92-
/// Coodinate (COO) format of sparse tensor index.
92+
/// Coordinate (COO) format of sparse tensor index.
9393
///
9494
/// COO's index list are represented as a NxM matrix,
9595
/// where N is the number of non-zero values,

rust/arrow/src/ipc/reader.rs

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -310,7 +310,7 @@ fn create_primitive_array(
310310
make_array(array_data)
311311
}
312312

313-
/// Reads the correct number of buffers based on list type an null_count, and creates a
313+
/// Reads the correct number of buffers based on list type and null_count, and creates a
314314
/// list array ref
315315
fn create_list_array(
316316
field_node: &ipc::FieldNode,

rust/arrow/src/json/reader.rs

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -285,7 +285,7 @@ fn infer_json_schema(file: File, max_read_records: Option<usize>) -> Result<Arc<
285285
Ok(())
286286
}
287287
Value::Object(_) => Err(ArrowError::JsonError(
288-
"Reading nested JSON structes currently not supported"
288+
"Reading nested JSON structs currently not supported"
289289
.to_string(),
290290
)),
291291
}
@@ -1047,7 +1047,7 @@ mod tests {
10471047
List(Box::new(Int64)),
10481048
coerce_data_type(vec![&Int64, &List(Box::new(Int64))]).unwrap()
10491049
);
1050-
// boolean an number are incompatible, return utf8
1050+
// boolean and number are incompatible, return utf8
10511051
assert_eq!(
10521052
List(Box::new(Utf8)),
10531053
coerce_data_type(vec![&Boolean, &List(Box::new(Float64))]).unwrap()

rust/datafusion/src/execution/physical_plan/mod.rs

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -65,7 +65,7 @@ pub trait AggregateExpr: Send + Sync {
6565
fn name(&self) -> String;
6666
/// Get the data type of this expression, given the schema of the input
6767
fn data_type(&self, input_schema: &Schema) -> Result<DataType>;
68-
/// Evaluate the expressioon being aggregated
68+
/// Evaluate the expression being aggregated
6969
fn evaluate_input(&self, batch: &RecordBatch) -> Result<ArrayRef>;
7070
/// Create an accumulator for this aggregate expression
7171
fn create_accumulator(&self) -> Rc<RefCell<dyn Accumulator>>;

rust/parquet/src/column/writer.rs

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -1426,7 +1426,7 @@ mod tests {
14261426
#[test]
14271427
fn test_column_writer_add_data_pages_with_dict() {
14281428
// ARROW-5129: Test verifies that we add data page in case of dictionary encoding
1429-
// and no fallback occured so far.
1429+
// and no fallback occurred so far.
14301430
let file = get_temp_file("test_column_writer_add_data_pages_with_dict", &[]);
14311431
let sink = FileSink::new(&file);
14321432
let page_writer = Box::new(SerializedPageWriter::new(sink));

0 commit comments

Comments
 (0)