Module: Polars::Functions
- Included in:
- Polars
- Defined in:
- lib/polars/string_cache.rb,
lib/polars/functions/col.rb,
lib/polars/functions/len.rb,
lib/polars/functions/lit.rb,
lib/polars/functions/lazy.rb,
lib/polars/functions/eager.rb,
lib/polars/functions/random.rb,
lib/polars/functions/repeat.rb,
lib/polars/functions/business.rb,
lib/polars/functions/datatype.rb,
lib/polars/functions/whenthen.rb,
lib/polars/functions/as_datatype.rb,
lib/polars/functions/escape_regex.rb,
lib/polars/functions/range/int_range.rb,
lib/polars/functions/range/date_range.rb,
lib/polars/functions/range/time_range.rb,
lib/polars/functions/range/linear_space.rb,
lib/polars/functions/aggregation/vertical.rb,
lib/polars/functions/range/datetime_range.rb,
lib/polars/functions/aggregation/horizontal.rb
Instance Method Summary collapse
-
#align_frames(*frames, on:, how: nil, select: nil, descending: false) ⇒ Object
Align an array of frames using the unique values from one or more columns as a key.
-
#all(*names, ignore_nulls: true) ⇒ Expr
Either return an expression representing all columns, or evaluate a bitwise AND operation.
-
#all_horizontal(*exprs) ⇒ Expr
Compute the bitwise AND horizontally across columns.
-
#any(*names, ignore_nulls: true) ⇒ Expr
Evaluate a bitwise OR operation.
-
#any_horizontal(*exprs) ⇒ Expr
Compute the bitwise OR horizontally across columns.
-
#approx_n_unique(*columns) ⇒ Expr
Approximate count of unique values.
-
#arctan2(y, x) ⇒ Expr
Compute two argument arctan in radians.
-
#arg_sort_by(exprs, *more_exprs, descending: false, nulls_last: false, multithreaded: true, maintain_order: false) ⇒ Expr
Find the indexes that would sort the columns.
-
#arg_where(condition, eager: false) ⇒ Expr, Series
Return indices where
`condition` evaluates `true`. -
#business_day_count(start, stop, week_mask: [true, true, true, true, true, false, false], holidays: []) ⇒ Expr
Count the number of business days between
`start` and `end` (not including `end`). -
#coalesce(exprs, *more_exprs, eager: false) ⇒ Expr
Folds the columns from left to right, keeping the first non-null value.
-
#col(name, *more_names) ⇒ Expr
Return an expression representing a column in a DataFrame.
-
#collect_all(lazy_frames, optimizations: DEFAULT_QUERY_OPT_FLAGS, engine: "auto", lazy: false) ⇒ Array
Collect multiple LazyFrames at the same time.
-
#concat(items, rechunk: false, how: "vertical", parallel: true, strict: false) ⇒ Object
Aggregate multiple Dataframes/Series to a single DataFrame/Series.
-
#concat_arr(exprs, *more_exprs) ⇒ Expr
Horizontally concatenate columns into a single array column.
-
#concat_list(exprs, *more_exprs) ⇒ Expr
Concat the arrays in a Series dtype List in linear time.
-
#concat_str(exprs, *more_exprs, separator: "", ignore_nulls: false) ⇒ Expr
Horizontally concat Utf8 Series in linear time.
-
#corr(a, b, method: "pearson", ddof: nil, propagate_nans: false, eager: false) ⇒ Expr
Compute the Pearson or Spearman rank correlation between two columns.
-
#count(*columns) ⇒ Expr
Return the number of non-null values in the column.
-
#cov(a, b, ddof: 1, eager: false) ⇒ Expr
Compute the covariance between two columns/ expressions.
-
#cum_count(*columns, reverse: false) ⇒ Expr
Return the cumulative count of the non-null values in the column.
-
#cum_fold(acc, exprs, returns_scalar: false, return_dtype: nil, include_init: false, &function) ⇒ Object
Cumulatively accumulate over multiple columns horizontally/row wise with a left fold.
-
#cum_reduce(exprs, returns_scalar: false, return_dtype: nil, &function) ⇒ Expr
Cumulatively reduce horizontally across columns with a left fold.
-
#cum_sum(*names) ⇒ Expr
Cumulatively sum all values.
-
#cum_sum_horizontal(*exprs) ⇒ Expr
Cumulatively sum all values horizontally across columns.
-
#date(year, month, day) ⇒ Expr
Create a Polars literal expression of type Date.
-
#date_range(start, stop, interval = "1d", closed: "both", eager: false) ⇒ Object
Create a range of type
`Datetime` (or `Date`). -
#date_ranges(start, stop, interval = "1d", closed: "both", eager: false) ⇒ Object
Create a column of date ranges.
-
#datetime(year, month, day, hour = nil, minute = nil, second = nil, microsecond = nil, time_unit: "us", time_zone: nil, ambiguous: "raise") ⇒ Expr
Create a Polars literal expression of type Datetime.
-
#datetime_range(start, stop, interval = "1d", closed: "both", time_unit: nil, time_zone: nil, eager: false) ⇒ Object
Generate a datetime range.
-
#datetime_ranges(start, stop, interval: "1d", closed: "both", time_unit: nil, time_zone: nil, eager: false) ⇒ Object
Create a column of datetime ranges.
-
#disable_string_cache ⇒ nil
Disable and clear the global string cache.
-
#dtype_of(col_or_expr) ⇒ DataTypeExpr
Get a lazily evaluated :class:
`DataType` of a column or expression. -
#duration(weeks: nil, days: nil, hours: nil, minutes: nil, seconds: nil, milliseconds: nil, microseconds: nil, nanoseconds: nil, time_unit: nil) ⇒ Expr
Create polars
`Duration` from distinct time components. -
#element ⇒ Expr
Alias for an element being evaluated in an
`eval` expression. -
#enable_string_cache ⇒ nil
Enable the global string cache.
-
#escape_regex(s) ⇒ String
Escapes string regex meta characters.
-
#exclude(columns, *more_columns) ⇒ Object
Exclude certain columns from a wildcard/regex selection.
-
#field(name) ⇒ Expr
Select a field in the current
`struct.with_fields` scope. -
#first(*columns) ⇒ Expr
Get the first value.
-
#fold(acc, exprs, returns_scalar: false, return_dtype: nil, &function) ⇒ Expr
Accumulate over multiple columns horizontally/row wise with a left fold.
-
#format(f_string, *args) ⇒ Expr
Format expressions as a string.
-
#from_epoch(column, time_unit: "s") ⇒ Object
Utility function that parses an epoch timestamp (or Unix time) to Polars Date(time).
-
#groups(column) ⇒ Object
Syntactic sugar for
Polars.col("foo").agg_groups. -
#head(column, n = 10) ⇒ Expr
Get the first
`n` rows. -
#implode(*columns) ⇒ Expr
Aggregate all column values into a list.
-
#int_range(start = 0, stop = nil, step: 1, eager: false, dtype: Int64) ⇒ Expr, Series
(also: #arange)
Create a range expression (or Series).
-
#int_ranges(start = 0, stop = nil, step: 1, dtype: Int64, eager: false) ⇒ Expr, Series
Generate a range of integers for each row of the input columns.
-
#last(*columns) ⇒ Expr
Get the last value.
-
#len ⇒ Expr
(also: #length)
Return the number of rows in the context.
-
#linear_spaces(start, stop, num_samples, closed: "both", as_array: false, eager: false) ⇒ Expr, Series
Generate a sequence of evenly-spaced values for each row between
`start` and `end`. -
#lit(value, dtype: nil, allow_object: false) ⇒ Expr
Return an expression representing a literal value.
-
#map_batches(exprs, return_dtype: nil, is_elementwise: false, returns_scalar: false, &function) ⇒ Expr
Map a custom function over multiple columns/expressions.
-
#map_groups(exprs, return_dtype: nil, is_elementwise: false, returns_scalar: false, &function) ⇒ Expr
Apply a custom/user-defined function (UDF) in a GroupBy context.
-
#max(*names) ⇒ Expr
Get the maximum value.
-
#max_horizontal(*exprs) ⇒ Expr
Get the maximum value horizontally across columns.
-
#mean(*columns) ⇒ Expr
Get the mean value.
-
#mean_horizontal(*exprs, ignore_nulls: true) ⇒ Expr
Compute the mean of all values horizontally across columns.
-
#median(*columns) ⇒ Expr
Get the median value.
-
#min(*names) ⇒ Expr
Get the minimum value.
-
#min_horizontal(*exprs) ⇒ Expr
Get the minimum value horizontally across columns.
-
#n_unique(*columns) ⇒ Expr
Count unique values.
-
#nth(*indices, strict: true) ⇒ Expr
Get the nth column(s) of the context.
-
#ones(n, dtype: Float64, eager: false) ⇒ Object
Construct a column of length
`n` filled with ones. -
#quantile(column, quantile, interpolation: "nearest") ⇒ Expr
Syntactic sugar for
Polars.col("foo").quantile(...). -
#reduce(exprs, returns_scalar: false, return_dtype: nil, &function) ⇒ Expr
Accumulate over multiple columns horizontally/ row wise with a left fold.
-
#repeat(value, n, dtype: nil, eager: false) ⇒ Object
Repeat a single value n times.
-
#rolling_corr(a, b, window_size:, min_samples: nil, ddof: 1) ⇒ Expr
Compute the rolling correlation between two columns/ expressions.
-
#rolling_cov(a, b, window_size:, min_samples: nil, ddof: 1) ⇒ Expr
Compute the rolling covariance between two columns/ expressions.
-
#select(*exprs, eager: true, **named_exprs) ⇒ DataFrame
Run polars expressions without a context.
-
#self_dtype ⇒ DataTypeExpr
Get the dtype of
`self` in `map_elements` and `map_batches`. -
#set_random_seed(seed) ⇒ nil
Set the global random seed for Polars.
-
#sql_expr(sql) ⇒ Expr
Parse one or more SQL expressions to polars expression(s).
-
#std(column, ddof: 1) ⇒ Expr
Get the standard deviation.
-
#struct(*exprs, schema: nil, eager: false, **named_exprs) ⇒ Object
Collect several columns into a Series of dtype Struct.
-
#sum(*names) ⇒ Expr
Sum all values.
-
#sum_horizontal(*exprs, ignore_nulls: true) ⇒ Expr
Sum all values horizontally across columns.
-
#tail(column, n = 10) ⇒ Expr
Get the last
`n` rows. -
#time(hour = nil, minute = nil, second = nil, microsecond = nil) ⇒ Expr
Create a Polars literal expression of type Time.
-
#time_range(start = nil, stop = nil, interval = "1h", closed: "both", eager: false) ⇒ Object
Generate a time range.
-
#time_ranges(start = nil, stop = nil, interval = "1h", closed: "both", eager: false) ⇒ Object
Create a column of time ranges.
-
#union(items, how: "vertical", strict: false) ⇒ Object
Combine multiple DataFrames, LazyFrames, or Series into a single object.
-
#using_string_cache ⇒ Boolean
Check whether the global string cache is enabled.
-
#var(column, ddof: 1) ⇒ Expr
Get the variance.
-
#when(*predicates, **constraints) ⇒ When
Start a "when, then, otherwise" expression.
-
#zeros(n, dtype: Float64, eager: false) ⇒ Object
Construct a column of length
`n` filled with zeros.
Instance Method Details
#align_frames(*frames, on:, how: nil, select: nil, descending: false) ⇒ Object
Align an array of frames using the unique values from one or more columns as a key.
Frames that do not contain the given key values have rows injected (with nulls filling the non-key columns), and each resulting frame is sorted by the key.
The original column order of input frames is not changed unless select is
specified (in which case the final column order is determined from that).
Note that this does not result in a joined frame - you receive the same number of frames back that you passed in, but each is now aligned by key and has the same number of rows.
464 465 466 467 468 469 470 471 472 473 474 475 476 477 478 479 480 481 482 483 484 485 486 487 488 489 490 491 492 493 494 495 496 497 498 499 500 501 502 503 504 505 506 507 |
# File 'lib/polars/functions/eager.rb', line 464 def align_frames( *frames, on:, how: nil, select: nil, descending: false ) # TODO update if how.nil? warn "The default `how` for `align_frames` method will change from `left` to `full` in a future version" how = "left" end if frames.empty? return [] elsif frames.map(&:class).uniq.length != 1 raise TypeError, "Input frames must be of a consistent type (all LazyFrame or all DataFrame)" end # establish the superset of all "on" column values, sort, and cache eager = frames[0].is_a?(DataFrame) alignment_frame = ( concat(frames.map { |df| df.lazy.select(on) }) .unique(maintain_order: false) .sort(on, descending: descending) ) alignment_frame = ( eager ? alignment_frame.collect.lazy : alignment_frame.cache ) # finally, align all frames aligned_frames = frames.map do |df| alignment_frame.join( df.lazy, on: alignment_frame.columns, how: how ).select(df.columns) end if !select.nil? aligned_frames = aligned_frames.map { |df| df.select(select) } end eager ? aligned_frames.map(&:collect) : aligned_frames end |
#all(*names, ignore_nulls: true) ⇒ Expr
Either return an expression representing all columns, or evaluate a bitwise AND operation.
If no arguments are passed, this function is syntactic sugar for col("*").
Otherwise, this function is syntactic sugar for col(names).all.
44 45 46 47 48 49 50 |
# File 'lib/polars/functions/aggregation/vertical.rb', line 44

# Either return an expression representing all columns, or evaluate a
# bitwise AND operation.
#
# With no arguments this is syntactic sugar for `col("*")`; otherwise it is
# syntactic sugar for `col(names).all`.
#
# @return [Expr]
def all(*names, ignore_nulls: true)
  return col("*") if names.empty?

  col(*names).all(ignore_nulls: ignore_nulls)
end
#all_horizontal(*exprs) ⇒ Expr
Compute the bitwise AND horizontally across columns.
34 35 36 37 |
# File 'lib/polars/functions/aggregation/horizontal.rb', line 34

# Compute the bitwise AND horizontally across columns.
#
# @return [Expr]
def all_horizontal(*exprs)
  parsed = Utils.parse_into_list_of_expressions(*exprs)
  Utils.wrap_expr(Plr.all_horizontal(parsed))
end
#any(*names, ignore_nulls: true) ⇒ Expr
Evaluate a bitwise OR operation.
Syntactic sugar for col(names).any.
80 81 82 |
# File 'lib/polars/functions/aggregation/vertical.rb', line 80

# Evaluate a bitwise OR operation; syntactic sugar for `col(names).any`.
#
# @return [Expr]
def any(*names, ignore_nulls: true)
  selection = col(*names)
  selection.any(ignore_nulls: ignore_nulls)
end
#any_horizontal(*exprs) ⇒ Expr
Compute the bitwise OR horizontally across columns.
70 71 72 73 |
# File 'lib/polars/functions/aggregation/horizontal.rb', line 70

# Compute the bitwise OR horizontally across columns.
#
# @return [Expr]
def any_horizontal(*exprs)
  parsed = Utils.parse_into_list_of_expressions(*exprs)
  Utils.wrap_expr(Plr.any_horizontal(parsed))
end
#approx_n_unique(*columns) ⇒ Expr
Approximate count of unique values.
This function is syntactic sugar for col(columns).approx_n_unique, and
uses the HyperLogLog++ algorithm for cardinality estimation.
429 430 431 |
# File 'lib/polars/functions/lazy.rb', line 429

# Approximate count of unique values; syntactic sugar for
# `col(columns).approx_n_unique` (HyperLogLog++ cardinality estimation).
#
# @return [Expr]
def approx_n_unique(*columns)
  selection = col(*columns)
  selection.approx_n_unique
end
#arctan2(y, x) ⇒ Expr
Compute two argument arctan in radians.
Returns the angle (in radians) in the plane between the positive x-axis and the ray from the origin to (x,y).
1340 1341 1342 1343 1344 1345 1346 1347 1348 |
# File 'lib/polars/functions/lazy.rb', line 1340

# Compute two-argument arctan in radians: the angle between the positive
# x-axis and the ray from the origin to (x, y).
#
# String inputs are treated as column references.
#
# @return [Expr]
def arctan2(y, x)
  y = col(y) if Utils.strlike?(y)
  x = col(x) if Utils.strlike?(x)
  Utils.wrap_expr(Plr.arctan2(y._rbexpr, x._rbexpr))
end
#arg_sort_by(exprs, *more_exprs, descending: false, nulls_last: false, multithreaded: true, maintain_order: false) ⇒ Expr
Find the indexes that would sort the columns.
Argsort by multiple columns. The first column will be used for the ordering. If there are duplicates in the first column, the second column will be used to determine the ordering and so on.
1505 1506 1507 1508 1509 1510 1511 1512 1513 1514 1515 1516 1517 |
# File 'lib/polars/functions/lazy.rb', line 1505

# Find the indexes that would sort the columns.
#
# The first column drives the ordering; ties fall through to the next column,
# and so on. `descending`/`nulls_last` may be a single flag or one per column.
#
# @return [Expr]
def arg_sort_by(
  exprs,
  *more_exprs,
  descending: false,
  nulls_last: false,
  multithreaded: true,
  maintain_order: false
)
  parsed = Utils.parse_into_list_of_expressions(exprs, *more_exprs)
  # Broadcast scalar flags so there is one per sort key.
  desc_flags = Utils.extend_bool(descending, parsed.length, "descending", "exprs")
  null_flags = Utils.extend_bool(nulls_last, parsed.length, "nulls_last", "exprs")
  Utils.wrap_expr(
    Plr.arg_sort_by(parsed, desc_flags, null_flags, multithreaded, maintain_order)
  )
end
#arg_where(condition, eager: false) ⇒ Expr, Series
Return indices where condition evaluates true.
1631 1632 1633 1634 1635 1636 1637 1638 1639 1640 1641 |
# File 'lib/polars/functions/lazy.rb', line 1631

# Return indices where `condition` evaluates `true`.
#
# @param condition [Object] boolean expression (or Series when `eager: true`).
# @param eager [Boolean] evaluate immediately and return a Series.
# @return [Expr, Series]
# @raise [ArgumentError] if `eager: true` and `condition` is not a Series.
def arg_where(condition, eager: false)
  unless eager
    parsed = Utils.parse_into_expression(condition, str_as_lit: true)
    return Utils.wrap_expr(Plr.arg_where(parsed))
  end

  if !condition.is_a?(Series)
    raise ArgumentError, "expected 'Series' in 'arg_where' if 'eager: true', got #{condition.class.name}"
  end
  condition.to_frame.select(arg_where(Polars.col(condition.name))).to_series
end
#business_day_count(start, stop, week_mask: [true, true, true, true, true, false, false], holidays: []) ⇒ Expr
This functionality is considered unstable. It may be changed at any point without it being considered a breaking change.
Count the number of business days between start and end (not including end).
76 77 78 79 80 81 82 83 84 85 86 87 88 89 90 91 92 93 |
# File 'lib/polars/functions/business.rb', line 76

# Count the number of business days between `start` and `stop`
# (not including `stop`).
#
# @param week_mask [Array<Boolean>] which weekdays count as business days
#   (Monday first); default is Monday-Friday.
# @param holidays [Array<Date>] dates excluded from the count.
# @return [Expr]
def business_day_count(
  start,
  stop,
  week_mask: [true, true, true, true, true, false, false],
  holidays: []
)
  start_expr = Utils.parse_into_expression(start)
  stop_expr = Utils.parse_into_expression(stop)
  # Holidays are handed to the engine as integer day offsets from the Unix epoch.
  epoch = ::Date.new(1970, 1, 1)
  holiday_offsets = holidays.map { |holiday| holiday - epoch }
  Utils.wrap_expr(
    Plr.business_day_count(start_expr, stop_expr, week_mask, holiday_offsets)
  )
end
#coalesce(exprs, *more_exprs, eager: false) ⇒ Expr
Folds the columns from left to right, keeping the first non-null value.
1706 1707 1708 1709 1710 1711 1712 1713 1714 1715 1716 1717 1718 1719 1720 1721 |
# File 'lib/polars/functions/lazy.rb', line 1706

# Fold the columns from left to right, keeping the first non-null value.
#
# @param eager [Boolean] evaluate immediately; requires at least one Series input.
# @return [Expr, Series]
def coalesce(exprs, *more_exprs, eager: false)
  unless eager
    parsed = Utils.parse_into_list_of_expressions(exprs, *more_exprs)
    return Utils.wrap_expr(Plr.coalesce(parsed))
  end

  inputs = [exprs] + more_exprs
  series = inputs.filter_map { |e| e if e.is_a?(Series) }
  if !series.any?
    msg = "expected at least one Series in 'coalesce' if 'eager: true'"
    raise ArgumentError, msg
  end
  # Refer to Series inputs by name inside the temporary frame.
  lookups = inputs.map { |e| e.is_a?(Series) ? e.name : e }
  Polars::DataFrame.new(series).select(coalesce(lookups, eager: false)).to_series
end
#col(name, *more_names) ⇒ Expr
Return an expression representing a column in a DataFrame.
6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 22 23 24 25 26 27 28 29 30 31 32 33 34 35 36 37 38 39 40 41 42 43 44 45 46 |
# File 'lib/polars/functions/col.rb', line 6

# Return an expression representing a column in a DataFrame.
#
# Accepts a single name/dtype, several names/dtypes, or an Array/Set of either.
#
# @return [Expr]
# @raise [TypeError] for inputs that are neither string-like nor dtypes.
def col(name, *more_names)
  if more_names.any?
    if Utils.strlike?(name)
      all_names = [name, *more_names]
      return Selector._by_name(all_names.map(&:to_s), strict: true, expand_patterns: true).as_expr
    end

    if Utils.is_polars_dtype(name)
      return Selector._by_type([name, *more_names]).as_expr
    end

    raise TypeError, "invalid input for `col`\n\nExpected `str` or `DataType`, got #{name.class.name}."
  end

  if Utils.strlike?(name)
    Utils.wrap_expr(Plr.col(name.to_s))
  elsif Utils.is_polars_dtype(name)
    Selector._by_dtype([name]).as_expr
  elsif name.is_a?(::Array) || name.is_a?(::Set)
    names = Array(name)
    return Utils.wrap_expr(Plr.cols(names)) if names.empty?

    # The first element decides how the whole collection is interpreted.
    head = names[0]
    if Utils.strlike?(head)
      Selector._by_name(names.map(&:to_s), strict: true, expand_patterns: true).as_expr
    elsif Utils.is_polars_dtype(head)
      Selector._by_dtype(names).as_expr
    else
      raise TypeError, "invalid input for `col`\n\nExpected iterable of type `str` or `DataType`, got iterable of type #{head.class.name}."
    end
  else
    raise TypeError, "invalid input for `col`\n\nExpected `str` or `DataType`, got #{name.class.name}."
  end
end
#collect_all(lazy_frames, optimizations: DEFAULT_QUERY_OPT_FLAGS, engine: "auto", lazy: false) ⇒ Array
Collect multiple LazyFrames at the same time.
This runs all the computation graphs in parallel on Polars threadpool.
1545 1546 1547 1548 1549 1550 1551 1552 1553 1554 1555 1556 1557 1558 1559 1560 1561 1562 1563 1564 1565 1566 1567 1568 1569 |
# File 'lib/polars/functions/lazy.rb', line 1545

# Collect multiple LazyFrames at the same time, running all computation
# graphs in parallel on the Polars threadpool.
#
# @param lazy ["unstable"] combine into a single LazyFrame instead (unstable).
# @return [Array<DataFrame>] (or a LazyFrame when `lazy: true`).
def collect_all(
  lazy_frames,
  optimizations: DEFAULT_QUERY_OPT_FLAGS,
  engine: "auto",
  lazy: false
)
  lfs = lazy_frames.map { |lf| lf._ldf }

  if lazy
    msg = "the `lazy` parameter of `collect_all` is considered unstable."
    Utils.issue_unstable_warning(msg)
    ldf = Plr.collect_all_lazy(lfs, optimizations._rboptflags)
    return LazyFrame._from_pyldf(ldf)
  end

  engine = LazyFrame._select_engine(engine)
  raw_frames = Plr.collect_all(lfs, engine, optimizations._rboptflags)
  # Wrap the raw frames into Ruby-level DataFrames.
  raw_frames.map { |rbdf| Utils.wrap_df(rbdf) }
end
#concat(items, rechunk: false, how: "vertical", parallel: true, strict: false) ⇒ Object
Aggregate multiple Dataframes/Series to a single DataFrame/Series.
98 99 100 101 102 103 104 105 106 107 108 109 110 111 112 113 114 115 116 117 118 119 120 121 122 123 124 125 126 127 128 129 130 131 132 133 134 135 136 137 138 139 140 141 142 143 144 145 146 147 148 149 150 151 152 153 154 155 156 157 158 159 160 161 162 163 164 165 166 167 168 169 170 171 172 173 174 175 176 177 178 179 180 181 182 183 184 185 186 187 188 189 190 191 192 193 194 195 196 197 198 199 200 201 202 203 204 205 206 207 208 209 |
# File 'lib/polars/functions/eager.rb', line 98

# Aggregate multiple DataFrames/Series to a single DataFrame/Series.
#
# @param items [Enumerable] frames, series, or expressions (all of one type).
# @param rechunk [Boolean] rechunk the final result into contiguous memory.
# @param how [String] "vertical", "vertical_relaxed", "diagonal",
#   "diagonal_relaxed", "horizontal", or "align".
# @param parallel [Boolean] only relevant for LazyFrame-backed strategies.
# @param strict [Boolean] only relevant for the "horizontal" strategy.
# @return [Object] the concatenated DataFrame/LazyFrame/Series/Expr.
def concat(items, rechunk: false, how: "vertical", parallel: true, strict: false)
  elems = items.to_a

  if elems.empty?
    raise ArgumentError, "cannot concat empty list"
  end

  if how == "align"
    if !elems[0].is_a?(DataFrame) && !elems[0].is_a?(LazyFrame)
      msg = "'align' strategy is not supported for #{elems[0].class.name}"
      raise TypeError, msg
    end

    # establish common columns, maintaining the order in which they appear
    all_columns = elems.flat_map { |e| e.collect_schema.names }
    key = all_columns.uniq.map.with_index.to_h
    common_cols = elems.map { |e| e.collect_schema.names }
      .reduce { |x, y| Set.new(x) & Set.new(y) }
      .sort_by { |k| key[k] }

    # we require at least one key column for 'align'
    if common_cols.empty?
      msg = "'align' strategy requires at least one common column"
      raise InvalidOperationError, msg
    end

    # align the frame data using a full outer join with no suffix-resolution
    # (so we raise an error in case of column collision, like "horizontal")
    lf = elems.map { |df| df.lazy }.reduce do |x, y|
      x.join(
        y,
        how: "full",
        on: common_cols,
        suffix: "_PL_CONCAT_RIGHT",
        maintain_order: "right_left"
      )
        # Coalesce full outer join columns
        .with_columns(
          common_cols.map { |name| F.coalesce([name, "#{name}_PL_CONCAT_RIGHT"]) }
        )
        .drop(common_cols.map { |name| "#{name}_PL_CONCAT_RIGHT" })
    end.sort(common_cols)

    eager = elems[0].is_a?(DataFrame)
    return eager ? lf.collect : lf
  end

  first = elems[0]
  if first.is_a?(DataFrame)
    if how == "vertical"
      out = Utils.wrap_df(Plr.concat_df(elems))
    elsif how == "vertical_relaxed"
      out = Utils.wrap_ldf(
        Plr.concat_lf(elems.map { |df| df.lazy }, rechunk, parallel, true)
      ).collect(optimizations: QueryOptFlags._eager)
    elsif how == "diagonal"
      out = Utils.wrap_df(Plr.concat_df_diagonal(elems))
    elsif how == "diagonal_relaxed"
      out = Utils.wrap_ldf(
        Plr.concat_lf_diagonal(elems.map { |df| df.lazy }, rechunk, parallel, true)
      ).collect(optimizations: QueryOptFlags._eager)
    elsif how == "horizontal"
      out = Utils.wrap_df(Plr.concat_df_horizontal(elems, strict))
    else
      # Fixed: message previously contained leftover Python f-string braces ("{{...}}").
      raise ArgumentError, "how must be one of {'vertical', 'vertical_relaxed', 'diagonal', 'diagonal_relaxed', 'horizontal'}, got #{how}"
    end
  elsif first.is_a?(LazyFrame)
    if how == "vertical"
      return Utils.wrap_ldf(Plr.concat_lf(elems, rechunk, parallel, false))
    elsif how == "vertical_relaxed"
      return Utils.wrap_ldf(Plr.concat_lf(elems, rechunk, parallel, true))
    elsif how == "diagonal"
      return Utils.wrap_ldf(Plr.concat_lf_diagonal(elems, rechunk, parallel, false))
    elsif how == "diagonal_relaxed"
      return Utils.wrap_ldf(Plr.concat_lf_diagonal(elems, rechunk, parallel, true))
    elsif how == "horizontal"
      return Utils.wrap_ldf(Plr.concat_lf_horizontal(elems, parallel, strict))
    else
      # Fixed: message previously omitted 'horizontal' even though it is handled above.
      raise ArgumentError, "Lazy only allows 'vertical', 'vertical_relaxed', 'diagonal', 'diagonal_relaxed', and 'horizontal' concat strategy."
    end
  elsif first.is_a?(Series)
    if how == "vertical"
      out = Utils.wrap_s(Plr.concat_series(elems))
    else
      msg = "Series only supports 'vertical' concat strategy"
      raise ArgumentError, msg
    end
  elsif first.is_a?(Expr)
    out = first
    elems[1..-1].each do |e|
      out = out.append(e)
    end
  else
    raise ArgumentError, "did not expect type: #{first.class.name} in 'Polars.concat'."
  end

  if rechunk
    out.rechunk
  else
    out
  end
end
#concat_arr(exprs, *more_exprs) ⇒ Expr
This functionality is considered unstable. It may be changed at any point without it being considered a breaking change.
Horizontally concatenate columns into a single array column.
Non-array columns are reshaped to a unit-width array. All columns must have
a dtype of either Polars::Array.new(<DataType>, width) or Polars::<DataType>.
417 418 419 420 |
# File 'lib/polars/functions/as_datatype.rb', line 417

# Horizontally concatenate columns into a single array column.
# Non-array columns are reshaped to a unit-width array.
#
# @return [Expr]
def concat_arr(exprs, *more_exprs)
  parsed = Utils.parse_into_list_of_expressions(exprs, *more_exprs)
  Utils.wrap_expr(Plr.concat_arr(parsed))
end
#concat_list(exprs, *more_exprs) ⇒ Expr
Concat the arrays in a Series dtype List in linear time.
374 375 376 377 |
# File 'lib/polars/functions/as_datatype.rb', line 374

# Concat the arrays in a Series dtype List in linear time.
#
# @return [Expr]
def concat_list(exprs, *more_exprs)
  parsed = Utils.parse_into_list_of_expressions(exprs, *more_exprs)
  Utils.wrap_expr(Plr.concat_list(parsed))
end
#concat_str(exprs, *more_exprs, separator: "", ignore_nulls: false) ⇒ Expr
Horizontally concat Utf8 Series in linear time. Non-Utf8 columns are cast to Utf8.
544 545 546 547 |
# File 'lib/polars/functions/as_datatype.rb', line 544

# Horizontally concat Utf8 Series in linear time; non-Utf8 columns
# are cast to Utf8.
#
# @return [Expr]
def concat_str(exprs, *more_exprs, separator: "", ignore_nulls: false)
  parsed = Utils.parse_into_list_of_expressions(exprs, *more_exprs)
  Utils.wrap_expr(Plr.concat_str(parsed, separator, ignore_nulls))
end
#corr(a, b, method: "pearson", ddof: nil, propagate_nans: false, eager: false) ⇒ Expr
Compute the Pearson or Spearman rank correlation between two columns.
774 775 776 777 778 779 780 781 782 783 784 785 786 787 788 789 790 791 792 793 794 795 796 797 798 799 800 801 802 803 804 805 806 807 808 809 810 811 812 |
# File 'lib/polars/functions/lazy.rb', line 774

# Compute the Pearson or Spearman rank correlation between two columns.
#
# @param method [String] "pearson" or "spearman".
# @param ddof [nil] deprecated; has no effect.
# @param propagate_nans [Boolean] Spearman only: propagate NaN values.
# @param eager [Boolean] evaluate immediately; requires at least one Series input.
# @return [Expr, Series]
def corr(
  a,
  b,
  method: "pearson",
  ddof: nil,
  propagate_nans: false,
  eager: false
)
  if !ddof.nil?
    Utils.issue_deprecation_warning(
      "The `ddof` parameter has no effect. Do not use it."
    )
  end

  if eager
    if !(a.is_a?(Series) || b.is_a?(Series))
      msg = "expected at least one Series in 'corr' inputs if 'eager: true'"
      raise ArgumentError, msg
    end
    frame = Polars::DataFrame.new([a, b].filter_map { |e| e if e.is_a?(Series) })
    exprs = [a, b].map { |e| e.is_a?(Series) ? e.name : e }
    frame.select(
      corr(*exprs, eager: false, method: method, propagate_nans: propagate_nans)
    ).to_series
  else
    a = Utils.parse_into_expression(a)
    b = Utils.parse_into_expression(b)
    if method == "pearson"
      Utils.wrap_expr(Plr.pearson_corr(a, b))
    elsif method == "spearman"
      Utils.wrap_expr(Plr.spearman_rank_corr(a, b, propagate_nans))
    else
      # Fixed: message previously contained leftover Python f-string braces ("{{...}}").
      msg = "method must be one of {'pearson', 'spearman'}, got #{method}"
      raise ArgumentError, msg
    end
  end
end
#count(*columns) ⇒ Expr
Return the number of non-null values in the column.
This function is syntactic sugar for col(columns).count.
Calling this function without any arguments returns the number of rows in the
context. This way of using the function is deprecated. Please use len
instead.
97 98 99 100 101 102 103 104 |
# File 'lib/polars/functions/lazy.rb', line 97

# Return the number of non-null values in the column; syntactic sugar for
# `col(columns).count`. Calling it with no arguments is deprecated — use
# `len` instead.
#
# @return [Expr]
def count(*columns)
  return col(*columns).count unless columns.empty?

  warn "`Polars.count` is deprecated. Use `Polars.length` instead."
  Utils.wrap_expr(Plr.len.alias("count"))
end
#cov(a, b, ddof: 1, eager: false) ⇒ Expr
Compute the covariance between two columns/ expressions.
860 861 862 863 864 865 866 867 868 869 870 871 872 873 874 875 |
# File 'lib/polars/functions/lazy.rb', line 860

# Compute the covariance between two columns/expressions.
#
# @param ddof [Integer] delta degrees of freedom.
# @param eager [Boolean] evaluate immediately; requires at least one Series input.
# @return [Expr, Series]
def cov(a, b, ddof: 1, eager: false)
  unless eager
    a_expr = Utils.parse_into_expression(a)
    b_expr = Utils.parse_into_expression(b)
    return Utils.wrap_expr(Plr.cov(a_expr, b_expr, ddof))
  end

  if !(a.is_a?(Series) || b.is_a?(Series))
    msg = "expected at least one Series in 'cov' inputs if 'eager: true'"
    raise ArgumentError, msg
  end
  frame = Polars::DataFrame.new([a, b].filter_map { |e| e if e.is_a?(Series) })
  exprs = [a, b].map { |e| e.is_a?(Series) ? e.name : e }
  frame.select(cov(*exprs, eager: false, ddof: ddof)).to_series
end
#cum_count(*columns, reverse: false) ⇒ Expr
Return the cumulative count of the non-null values in the column.
This function is syntactic sugar for col(columns).cum_count.
If no arguments are passed, returns the cumulative count of a context. Rows containing null values count towards the result.
134 135 136 |
# File 'lib/polars/functions/lazy.rb', line 134

# Return the cumulative count of the non-null values in the column;
# syntactic sugar for `col(columns).cum_count`.
#
# @return [Expr]
def cum_count(*columns, reverse: false)
  selection = col(*columns)
  selection.cum_count(reverse: reverse)
end
#cum_fold(acc, exprs, returns_scalar: false, return_dtype: nil, include_init: false, &function) ⇒ Object
If you simply want the first encountered expression as accumulator,
consider using cum_reduce.
Cumulatively accumulate over multiple columns horizontally/row wise with a left fold.
Every cumulative result is added as a separate field in a Struct column.
1214 1215 1216 1217 1218 1219 1220 1221 1222 1223 1224 1225 1226 1227 1228 1229 1230 1231 1232 1233 1234 1235 1236 1237 1238 1239 1240 1241 1242 1243 |
# File 'lib/polars/functions/lazy.rb', line 1214

# Cumulatively accumulate over multiple columns horizontally/row wise with a
# left fold. Every cumulative result becomes a separate field in a Struct column.
#
# @param acc [Object] the initial accumulator value.
# @param include_init [Boolean] include the initial accumulator in the output.
# @return [Expr] aliased "cum_fold".
def cum_fold(
  acc,
  exprs,
  returns_scalar: false,
  return_dtype: nil,
  include_init: false,
  &function
)
  acc_expr = Utils.parse_into_expression(acc, str_as_lit: true)
  exprs = [exprs] if exprs.is_a?(Expr)

  rt = return_dtype.nil? ? nil : Utils.parse_into_datatype_expr(return_dtype)._rbdatatype_expr
  parsed = Utils.parse_into_list_of_expressions(exprs)
  Utils.wrap_expr(
    Plr.cum_fold(
      acc_expr,
      _wrap_acc_lambda(function),
      parsed,
      returns_scalar,
      rt,
      include_init
    ).alias("cum_fold")
  )
end
#cum_reduce(exprs, returns_scalar: false, return_dtype: nil, &function) ⇒ Expr
Cumulatively reduce horizontally across columns with a left fold.
Every cumulative result is added as a separate field in a Struct column.
1281 1282 1283 1284 1285 1286 1287 1288 1289 1290 1291 1292 1293 1294 1295 1296 1297 1298 1299 1300 1301 1302 1303 1304 1305 |
# File 'lib/polars/functions/lazy.rb', line 1281

# Cumulatively reduce horizontally across columns with a left fold.
# Every cumulative result becomes a separate field in a Struct column.
#
# @return [Expr] aliased "cum_reduce".
def cum_reduce(
  exprs,
  returns_scalar: false,
  return_dtype: nil,
  &function
)
  exprs = [exprs] if exprs.is_a?(Expr)

  rt = return_dtype.nil? ? nil : Utils.parse_into_datatype_expr(return_dtype)._rbdatatype_expr
  parsed = Utils.parse_into_list_of_expressions(exprs)
  Utils.wrap_expr(
    Plr.cum_reduce(
      _wrap_acc_lambda(function),
      parsed,
      returns_scalar,
      rt
    ).alias("cum_reduce")
  )
end
#cum_sum(*names) ⇒ Expr
Cumulatively sum all values.
Syntactic sugar for col(names).cum_sum.
277 278 279 |
# File 'lib/polars/functions/aggregation/vertical.rb', line 277

# Cumulatively sum all values; syntactic sugar for `col(names).cum_sum`.
#
# @return [Expr]
def cum_sum(*names)
  selection = col(*names)
  selection.cum_sum
end
#cum_sum_horizontal(*exprs) ⇒ Expr
Cumulatively sum all values horizontally across columns.
241 242 243 244 245 246 247 248 249 |
# File 'lib/polars/functions/aggregation/horizontal.rb', line 241

# Cumulatively sum all values horizontally across columns.
#
# @return [Expr] aliased "cum_sum".
def cum_sum_horizontal(*exprs)
  parsed = Utils.parse_into_list_of_expressions(*exprs)
  wrapped = parsed.map { |e| Utils.wrap_expr(e) }
  # Seed with a u32 literal so the running sum does not eagerly cast to float.
  seed = Polars.lit(0).cast(UInt32)
  Polars.cum_fold(seed, wrapped) { |a, b| a + b }.alias("cum_sum")
end
#date(year, month, day) ⇒ Expr
Create a Polars literal expression of type Date.
179 180 181 182 183 184 185 |
# File 'lib/polars/functions/as_datatype.rb', line 179

# Create a Polars literal expression of type Date.
#
# @return [Expr] aliased "date".
def date(year, month, day)
  datetime(year, month, day).cast(Date).alias("date")
end
#date_range(start, stop, interval = "1d", closed: "both", eager: false) ⇒ Object
If both low and high are passed as date types (not datetime), and the
interval granularity is no finer than 1d, the returned range is also of
type date. All other permutations return a datetime Series.
Create a range of type Datetime (or Date).
37 38 39 40 41 42 43 44 45 46 47 48 49 50 51 52 53 54 55 56 57 58 |
# File 'lib/polars/functions/range/date_range.rb', line 37

# Create a range of type `Datetime` (or `Date`).
#
# @param interval [String] step size, e.g. "1d".
# @param closed ["both", "left", "right", "none"] which ends are inclusive.
# @param eager [Boolean] evaluate immediately and return a Series.
# @return [Expr, Series]
def date_range(
  start,
  stop,
  interval = "1d",
  closed: "both",
  eager: false
)
  interval = Utils.parse_interval_argument(interval)
  start_expr = Utils.parse_into_expression(start)
  stop_expr = Utils.parse_into_expression(stop)

  range_expr = Utils.wrap_expr(
    Plr.date_range(start_expr, stop_expr, interval, closed)
  )
  eager ? F.select(range_expr).to_series : range_expr
end
#date_ranges(start, stop, interval = "1d", closed: "both", eager: false) ⇒ Object
interval is created according to the following string language:
- 1ns (1 nanosecond)
- 1us (1 microsecond)
- 1ms (1 millisecond)
- 1s (1 second)
- 1m (1 minute)
- 1h (1 hour)
- 1d (1 calendar day)
- 1w (1 calendar week)
- 1mo (1 calendar month)
- 1q (1 calendar quarter)
- 1y (1 calendar year)
Or combine them: "3d12h4m25s" # 3 days, 12 hours, 4 minutes, and 25 seconds
By "calendar day", we mean the corresponding time on the next day (which may not be 24 hours, due to daylight savings). Similarly for "calendar week", "calendar month", "calendar quarter", and "calendar year".
Create a column of date ranges.
116 117 118 119 120 121 122 123 124 125 126 127 128 129 130 131 132 133 134 |
# File 'lib/polars/functions/range/date_range.rb', line 116

# Create a column of date ranges (one list per row of the inputs).
#
# @param interval [String] step size, e.g. "1d".
# @param closed ["both", "left", "right", "none"] which ends are inclusive.
# @param eager [Boolean] evaluate immediately and return a Series.
# @return [Expr, Series]
def date_ranges(
  start,
  stop,
  interval = "1d",
  closed: "both",
  eager: false
)
  interval = Utils.parse_interval_argument(interval)
  start_expr = Utils.parse_into_expression(start)
  stop_expr = Utils.parse_into_expression(stop)

  range_expr = Utils.wrap_expr(
    Plr.date_ranges(start_expr, stop_expr, interval, closed)
  )
  eager ? F.select(range_expr).to_series : range_expr
end
#datetime(year, month, day, hour = nil, minute = nil, second = nil, microsecond = nil, time_unit: "us", time_zone: nil, ambiguous: "raise") ⇒ Expr
Create a Polars literal expression of type Datetime.
90 91 92 93 94 95 96 97 98 99 100 101 102 103 104 105 106 107 108 109 110 111 112 113 114 115 116 117 118 119 120 121 122 123 124 125 126 127 128 |
# File 'lib/polars/functions/as_datatype.rb', line 90 def datetime( year, month, day, hour = nil, minute = nil, second = nil, microsecond = nil, time_unit: "us", time_zone: nil, ambiguous: "raise" ) ambiguous_expr = Utils.parse_into_expression(ambiguous, str_as_lit: true) year_expr = Utils.parse_into_expression(year) month_expr = Utils.parse_into_expression(month) day_expr = Utils.parse_into_expression(day) hour_expr = !hour.nil? ? Utils.parse_into_expression(hour) : nil minute_expr = !minute.nil? ? Utils.parse_into_expression(minute) : nil second_expr = !second.nil? ? Utils.parse_into_expression(second) : nil microsecond_expr = ( !microsecond.nil? ? Utils.parse_into_expression(microsecond) : nil ) Utils.wrap_expr( Plr.datetime( year_expr, month_expr, day_expr, hour_expr, minute_expr, second_expr, microsecond_expr, time_unit, time_zone, ambiguous_expr ) ) end |
#datetime_range(start, stop, interval = "1d", closed: "both", time_unit: nil, time_zone: nil, eager: false) ⇒ Object
Generate a datetime range.
52 53 54 55 56 57 58 59 60 61 62 63 64 65 66 67 68 69 70 71 72 73 74 75 76 77 78 79 |
# File 'lib/polars/functions/range/datetime_range.rb', line 52 def datetime_range( start, stop, interval = "1d", closed: "both", time_unit: nil, time_zone: nil, eager: false ) interval = Utils.parse_interval_argument(interval) if time_unit.nil? && interval.include?("ns") time_unit = "ns" end start_rbexpr = Utils.parse_into_expression(start) end_rbexpr = Utils.parse_into_expression(stop) result = Utils.wrap_expr( Plr.datetime_range( start_rbexpr, end_rbexpr, interval, closed, time_unit, time_zone ) ) if eager return Polars.select(result).to_series end result end |
#datetime_ranges(start, stop, interval: "1d", closed: "both", time_unit: nil, time_zone: nil, eager: false) ⇒ Object
Create a column of datetime ranges.
119 120 121 122 123 124 125 126 127 128 129 130 131 132 133 134 135 136 137 138 139 140 141 142 143 144 145 146 147 |
# File 'lib/polars/functions/range/datetime_range.rb', line 119 def datetime_ranges( start, stop, interval: "1d", closed: "both", time_unit: nil, time_zone: nil, eager: false ) interval = Utils.parse_interval_argument(interval) if time_unit.nil? && interval.include?("ns") time_unit = "ns" end start_rbexpr = Utils.parse_into_expression(start) end_rbexpr = Utils.parse_into_expression(stop) result = Utils.wrap_expr( Plr.datetime_ranges( start_rbexpr, end_rbexpr, interval, closed, time_unit, time_zone ) ) if eager return Polars.select(result).to_series end result end |
#disable_string_cache ⇒ nil
Disable and clear the global string cache.
90 91 92 |
# File 'lib/polars/string_cache.rb', line 90 def disable_string_cache Plr.disable_string_cache end |
#dtype_of(col_or_expr) ⇒ DataTypeExpr
This functionality is considered unstable. It may be changed at any point without it being considered a breaking change.
Get a lazily evaluated DataType of a column or expression.
10 11 12 13 14 15 16 17 18 19 |
# File 'lib/polars/functions/datatype.rb', line 10
# Get a lazily evaluated DataType of a column or expression.
#
# @param col_or_expr [String, Expr] a column name (wrapped via F.col) or an
#   existing expression.
# @return [DataTypeExpr]
def dtype_of(col_or_expr)
  # Idiom fix: the original pre-initialized `e = nil` even though both
  # branches unconditionally assign it; a conditional expression is clearer.
  e = col_or_expr.is_a?(::String) ? F.col(col_or_expr) : col_or_expr
  DataTypeExpr._from_rbdatatype_expr(RbDataTypeExpr.of_expr(e._rbexpr))
end
#duration(weeks: nil, days: nil, hours: nil, minutes: nil, seconds: nil, milliseconds: nil, microseconds: nil, nanoseconds: nil, time_unit: nil) ⇒ Expr
Create polars Duration from distinct time components.
263 264 265 266 267 268 269 270 271 272 273 274 275 276 277 278 279 280 281 282 283 284 285 286 287 288 289 290 291 292 293 294 295 296 297 298 299 300 301 302 303 304 305 306 307 308 309 310 311 312 313 314 315 316 317 318 319 320 |
# File 'lib/polars/functions/as_datatype.rb', line 263 def duration( weeks: nil, days: nil, hours: nil, minutes: nil, seconds: nil, milliseconds: nil, microseconds: nil, nanoseconds: nil, time_unit: nil ) if !nanoseconds.nil? && time_unit.nil? time_unit = "ns" end if !weeks.nil? weeks = Utils.parse_into_expression(weeks, str_as_lit: false) end if !days.nil? days = Utils.parse_into_expression(days, str_as_lit: false) end if !hours.nil? hours = Utils.parse_into_expression(hours, str_as_lit: false) end if !minutes.nil? minutes = Utils.parse_into_expression(minutes, str_as_lit: false) end if !seconds.nil? seconds = Utils.parse_into_expression(seconds, str_as_lit: false) end if !milliseconds.nil? milliseconds = Utils.parse_into_expression(milliseconds, str_as_lit: false) end if !microseconds.nil? microseconds = Utils.parse_into_expression(microseconds, str_as_lit: false) end if !nanoseconds.nil? nanoseconds = Utils.parse_into_expression(nanoseconds, str_as_lit: false) end if time_unit.nil? time_unit = "us" end Utils.wrap_expr( Plr.duration( weeks, days, hours, minutes, seconds, milliseconds, microseconds, nanoseconds, time_unit ) ) end |
#element ⇒ Expr
Alias for an element being evaluated in an eval expression.
50 51 52 |
# File 'lib/polars/functions/lazy.rb', line 50 def element Utils.wrap_expr(Plr.element) end |
#enable_string_cache ⇒ nil
Enable the global string cache.
Categorical columns created under the same global string cache have
the same underlying physical value when string values are equal. This allows the
columns to be concatenated or used in a join operation, for example.
63 64 65 |
# File 'lib/polars/string_cache.rb', line 63 def enable_string_cache Plr.enable_string_cache end |
#escape_regex(s) ⇒ String
Escapes string regex meta characters.
9 10 11 12 13 14 15 16 17 18 19 |
# File 'lib/polars/functions/escape_regex.rb', line 9 def escape_regex(s) if s.is_a?(Expr) msg = "escape_regex function is unsupported for `Expr`, you may want use `Expr.str.escape_regex` instead" raise TypeError, msg elsif !s.is_a?(::String) msg = "escape_regex function supports only `String` type, got `#{s.class.name}`" raise TypeError, msg end Plr.escape_regex(s) end |
#exclude(columns, *more_columns) ⇒ Object
Exclude certain columns from a wildcard/regex selection.
1408 1409 1410 |
# File 'lib/polars/functions/lazy.rb', line 1408 def exclude(columns, *more_columns) col("*").exclude(columns, *more_columns) end |
#field(name) ⇒ Expr
Select a field in the current struct.with_fields scope.
23 24 25 26 27 28 |
# File 'lib/polars/functions/lazy.rb', line 23 def field(name) if name.is_a?(::String) name = [name] end Utils.wrap_expr(Plr.field(name)) end |
#first(*columns) ⇒ Expr
Get the first value.
485 486 487 488 489 490 491 |
# File 'lib/polars/functions/lazy.rb', line 485 def first(*columns) if columns.empty? return cs.first.as_expr end col(*columns).first end |
#fold(acc, exprs, returns_scalar: false, return_dtype: nil, &function) ⇒ Expr
Accumulate over multiple columns horizontally/row wise with a left fold.
1078 1079 1080 1081 1082 1083 1084 1085 1086 1087 1088 1089 1090 1091 1092 1093 1094 1095 1096 1097 1098 1099 1100 1101 1102 1103 1104 1105 |
# File 'lib/polars/functions/lazy.rb', line 1078 def fold( acc, exprs, returns_scalar: false, return_dtype: nil, &function ) acc = Utils.parse_into_expression(acc, str_as_lit: true) if exprs.is_a?(Expr) exprs = [exprs] end rt = nil if !return_dtype.nil? rt = Utils.parse_into_datatype_expr(return_dtype)._rbdatatype_expr end exprs = Utils.parse_into_list_of_expressions(exprs) Utils.wrap_expr( Plr.fold( acc, _wrap_acc_lambda(function), exprs, returns_scalar, rt ) ) end |
#format(f_string, *args) ⇒ Expr
Format expressions as a string.
582 583 584 585 586 587 588 589 590 591 592 593 594 595 596 597 598 599 600 |
# File 'lib/polars/functions/as_datatype.rb', line 582 def format(f_string, *args) if f_string.scan("{}").length != args.length raise ArgumentError, "number of placeholders should equal the number of arguments" end exprs = [] arguments = args.each f_string.split(/(\{\})/).each do |s| if s == "{}" e = Utils.wrap_expr(Utils.parse_into_expression(arguments.next)) exprs << e elsif s.length > 0 exprs << lit(s) end end concat_str(exprs, separator: "") end |
#from_epoch(column, time_unit: "s") ⇒ Object
Utility function that parses an epoch timestamp (or Unix time) to Polars Date(time).
Depending on the unit provided, this function will return a different dtype:
- time_unit: "d" returns pl.Date
- time_unit: "s" returns pl.Datetime["us"]
- time_unit: "ms" returns pl.Datetime["ms"]
- time_unit: "us" returns pl.Datetime["us"]
- time_unit: "ns" returns pl.Datetime["ns"]
1752 1753 1754 1755 1756 1757 1758 1759 1760 1761 1762 1763 1764 1765 1766 1767 1768 |
# File 'lib/polars/functions/lazy.rb', line 1752 def from_epoch(column, time_unit: "s") if Utils.strlike?(column) column = F.col(column) elsif !column.is_a?(Series) && !column.is_a?(Expr) column = Series.new(column) end if time_unit == "d" column.cast(Date) elsif time_unit == "s" (column.cast(Int64) * 1_000_000).cast(Datetime.new("us")) elsif Utils::DTYPE_TEMPORAL_UNITS.include?(time_unit) column.cast(Datetime.new(time_unit)) else raise ArgumentError, "`time_unit` must be one of {{'ns', 'us', 'ms', 's', 'd'}}, got #{time_unit.inspect}." end end |
#groups(column) ⇒ Object
Syntactic sugar for Polars.col("foo").agg_groups.
1415 1416 1417 |
# File 'lib/polars/functions/lazy.rb', line 1415 def groups(column) col(column).agg_groups end |
#head(column, n = 10) ⇒ Expr
Get the first n rows.
This function is syntactic sugar for col(column).head(n).
642 643 644 |
# File 'lib/polars/functions/lazy.rb', line 642 def head(column, n = 10) col(column).head(n) end |
#implode(*columns) ⇒ Expr
Aggregate all column values into a list.
This function is syntactic sugar for col(name).implode.
177 178 179 |
# File 'lib/polars/functions/lazy.rb', line 177 def implode(*columns) col(*columns).implode end |
#int_range(start = 0, stop = nil, step: 1, eager: false, dtype: Int64) ⇒ Expr, Series Also known as: arange
Create a range expression (or Series).
This can be used in a select, with_column, etc. Be sure that the resulting
range size is equal to the length of the DataFrame you are collecting.
31 32 33 34 35 36 37 38 39 40 41 42 43 44 45 46 47 48 |
# File 'lib/polars/functions/range/int_range.rb', line 31 def int_range(start = 0, stop = nil, step: 1, eager: false, dtype: Int64) if stop.nil? stop = start start = 0 end start = Utils.parse_into_expression(start) stop = Utils.parse_into_expression(stop) dtype ||= Int64 dtype = dtype.to_s if dtype.is_a?(Symbol) result = Utils.wrap_expr(Plr.int_range(start, stop, step, dtype)).alias("arange") if eager return select(result).to_series end result end |
#int_ranges(start = 0, stop = nil, step: 1, dtype: Int64, eager: false) ⇒ Expr, Series
Generate a range of integers for each row of the input columns.
94 95 96 97 98 99 100 101 102 103 104 105 106 107 108 109 110 111 112 113 114 115 116 117 118 119 120 121 |
# File 'lib/polars/functions/range/int_range.rb', line 94 def int_ranges( start = 0, stop = nil, step: 1, dtype: Int64, eager: false ) if stop.nil? stop = start start = 0 end dtype_expr = Utils.parse_into_datatype_expr(dtype) start_rbexpr = Utils.parse_into_expression(start) end_rbexpr = Utils.parse_into_expression(stop) step_rbexpr = Utils.parse_into_expression(step) result = Utils.wrap_expr( Plr.int_ranges( start_rbexpr, end_rbexpr, step_rbexpr, dtype_expr._rbdatatype_expr ) ) if eager return F.select(result).to_series end result end |
#last(*columns) ⇒ Expr
Get the last value.
545 546 547 548 549 550 551 |
# File 'lib/polars/functions/lazy.rb', line 545 def last(*columns) if columns.empty? return cs.last.as_expr end col(*columns).last end |
#len ⇒ Expr Also known as: length
Return the number of rows in the context.
This is similar to COUNT(*) in SQL.
44 45 46 |
# File 'lib/polars/functions/len.rb', line 44 def len Utils.wrap_expr(Plr.len) end |
#linear_spaces(start, stop, num_samples, closed: "both", as_array: false, eager: false) ⇒ Expr, Series
This functionality is experimental. It may be changed at any point without it being considered a breaking change.
Generate a sequence of evenly-spaced values for each row between start and end.
The number of values in each sequence is determined by num_samples.
53 54 55 56 57 58 59 60 61 62 63 64 65 66 67 68 69 70 71 72 73 74 75 |
# File 'lib/polars/functions/range/linear_space.rb', line 53 def linear_spaces( start, stop, num_samples, closed: "both", as_array: false, eager: false ) start_rbexpr = Utils.parse_into_expression(start) end_rbexpr = Utils.parse_into_expression(stop) num_samples_rbexpr = Utils.parse_into_expression(num_samples) result = Utils.wrap_expr( Plr.linear_spaces( start_rbexpr, end_rbexpr, num_samples_rbexpr, closed, as_array ) ) if eager return F.select(result).to_series end result end |
#lit(value, dtype: nil, allow_object: false) ⇒ Expr
Return an expression representing a literal value.
22 23 24 25 26 27 28 29 30 31 32 33 34 35 36 37 38 39 40 41 42 43 44 |
# File 'lib/polars/functions/lit.rb', line 22
# Return an expression representing a literal value.
#
# Handles Time/DateTime (with optional dtype-supplied time unit/zone),
# Date, Series, Numo arrays and Ruby Arrays (wrapped as list literals),
# and plain scalars (optionally cast to dtype).
def lit(value, dtype: nil, allow_object: false)
  if value.is_a?(::Time) || value.is_a?(::DateTime)
    time_unit = dtype&.time_unit || "ns"
    # Bug fix: original read `dtype.&time_zone`, which parses as the operator
    # method call `dtype.&(time_zone)` with an undefined `time_zone` argument.
    # Safe navigation `dtype&.time_zone` is what was intended.
    time_zone = dtype&.time_zone
    e = lit(Utils.datetime_to_int(value, time_unit)).cast(Datetime.new(time_unit))
    if time_zone
      return e.dt.replace_time_zone(time_zone.to_s)
    else
      return e
    end
  elsif value.is_a?(::Date)
    # Dates go through a midnight-UTC Time literal, then cast back to Date.
    return lit(::Time.utc(value.year, value.month, value.day)).cast(Date)
  elsif value.is_a?(Polars::Series)
    value = value._s
    return Utils.wrap_expr(Plr.lit(value, allow_object, false))
  elsif (defined?(Numo::NArray) && value.is_a?(Numo::NArray)) || value.is_a?(::Array)
    # Arrays become a single list literal (note the extra [] nesting).
    return Utils.wrap_expr(Plr.lit(Series.new("literal", [value.to_a], dtype: dtype)._s, allow_object, true))
  elsif dtype
    return Utils.wrap_expr(Plr.lit(value, allow_object, true)).cast(dtype)
  end

  Utils.wrap_expr(Plr.lit(value, allow_object, true))
end
#map_batches(exprs, return_dtype: nil, is_elementwise: false, returns_scalar: false, &function) ⇒ Expr
This method is much slower than the native expressions API. Only use it if you cannot implement your logic otherwise.
A UDF passed to map_batches must be pure, meaning that it cannot modify
or depend on state other than its arguments. We may call the function
with arbitrary input data.
Map a custom function over multiple columns/expressions.
Produces a single Series result.
940 941 942 943 944 945 946 947 948 949 950 951 952 953 954 955 956 957 958 959 960 961 962 963 964 965 |
# File 'lib/polars/functions/lazy.rb', line 940 def map_batches( exprs, return_dtype: nil, is_elementwise: false, returns_scalar: false, &function ) rbexprs = Utils.parse_into_list_of_expressions(exprs) return_dtype_expr = if !return_dtype.nil? Utils.parse_into_datatype_expr(return_dtype)._rbdatatype_expr else nil end Utils.wrap_expr( Plr.map_expr( rbexprs, _map_batches_wrapper(function, returns_scalar: returns_scalar), return_dtype_expr, is_elementwise, returns_scalar ) ) end |
#map_groups(exprs, return_dtype: nil, is_elementwise: false, returns_scalar: false, &function) ⇒ Expr
This method is much slower than the native expressions API. Only use it if you cannot implement your logic otherwise.
Apply a custom/user-defined function (UDF) in a GroupBy context.
1016 1017 1018 1019 1020 1021 1022 1023 1024 1025 1026 1027 1028 1029 1030 |
# File 'lib/polars/functions/lazy.rb', line 1016 def map_groups( exprs, return_dtype: nil, is_elementwise: false, returns_scalar: false, &function ) map_batches( exprs, return_dtype: return_dtype, is_elementwise: is_elementwise, returns_scalar: returns_scalar, &function ) end |
#max(*names) ⇒ Expr
Get the maximum value.
Syntactic sugar for col(names).max.
135 136 137 |
# File 'lib/polars/functions/aggregation/vertical.rb', line 135 def max(*names) col(*names).max end |
#max_horizontal(*exprs) ⇒ Expr
Get the maximum value horizontally across columns.
103 104 105 106 |
# File 'lib/polars/functions/aggregation/horizontal.rb', line 103 def max_horizontal(*exprs) rbexprs = Utils.parse_into_list_of_expressions(*exprs) Utils.wrap_expr(Plr.max_horizontal(rbexprs)) end |
#mean(*columns) ⇒ Expr
Get the mean value.
This function is syntactic sugar for col(columns).mean.
299 300 301 |
# File 'lib/polars/functions/lazy.rb', line 299 def mean(*columns) col(*columns).mean end |
#mean_horizontal(*exprs, ignore_nulls: true) ⇒ Expr
Compute the mean of all values horizontally across columns.
208 209 210 211 |
# File 'lib/polars/functions/aggregation/horizontal.rb', line 208 def mean_horizontal(*exprs, ignore_nulls: true) rbexprs = Utils.parse_into_list_of_expressions(*exprs) Utils.wrap_expr(Plr.mean_horizontal(rbexprs, ignore_nulls)) end |
#median(*columns) ⇒ Expr
Get the median value.
This function is syntactic sugar for pl.col(columns).median.
342 343 344 |
# File 'lib/polars/functions/lazy.rb', line 342 def median(*columns) col(*columns).median end |
#min(*names) ⇒ Expr
Get the minimum value.
Syntactic sugar for col(names).min.
190 191 192 |
# File 'lib/polars/functions/aggregation/vertical.rb', line 190 def min(*names) col(*names).min end |
#min_horizontal(*exprs) ⇒ Expr
Get the minimum value horizontally across columns.
136 137 138 139 |
# File 'lib/polars/functions/aggregation/horizontal.rb', line 136 def min_horizontal(*exprs) rbexprs = Utils.parse_into_list_of_expressions(*exprs) Utils.wrap_expr(Plr.min_horizontal(rbexprs)) end |
#n_unique(*columns) ⇒ Expr
Count unique values.
This function is syntactic sugar for col(columns).n_unique.
385 386 387 |
# File 'lib/polars/functions/lazy.rb', line 385 def n_unique(*columns) col(*columns).n_unique end |
#nth(*indices, strict: true) ⇒ Expr
Get the nth column(s) of the context.
594 595 596 |
# File 'lib/polars/functions/lazy.rb', line 594 def nth(*indices, strict: true) cs.by_index(*indices, require_all: strict).as_expr end |
#ones(n, dtype: Float64, eager: false) ⇒ Object
Construct a column of length n filled with ones.
This is syntactic sugar for the repeat function.
76 77 78 79 80 81 82 83 |
# File 'lib/polars/functions/repeat.rb', line 76
# Construct a column of length n filled with ones.
# Syntactic sugar for the #repeat function.
#
# @raise [TypeError] when the dtype cannot represent the value 1
def ones(n, dtype: Float64, eager: false)
  # Naming fix: the local was called `zero` (copy-paste from #zeros) even
  # though it holds the dtype-specific representation of 1.
  if (one = _one_or_zero_by_dtype(1, dtype)).nil?
    msg = "invalid dtype for `ones`; found #{dtype}"
    raise TypeError, msg
  end

  repeat(one, n, dtype: dtype, eager: eager).alias("ones")
end
#quantile(column, quantile, interpolation: "nearest") ⇒ Expr
Syntactic sugar for Polars.col("foo").quantile(...).
1429 1430 1431 |
# File 'lib/polars/functions/lazy.rb', line 1429 def quantile(column, quantile, interpolation: "nearest") col(column).quantile(quantile, interpolation: interpolation) end |
#reduce(exprs, returns_scalar: false, return_dtype: nil, &function) ⇒ Expr
Accumulate over multiple columns horizontally/row wise with a left fold.
1142 1143 1144 1145 1146 1147 1148 1149 1150 1151 1152 1153 1154 1155 1156 1157 1158 1159 1160 1161 1162 1163 1164 1165 1166 |
# File 'lib/polars/functions/lazy.rb', line 1142 def reduce( exprs, returns_scalar: false, return_dtype: nil, &function ) if exprs.is_a?(Expr) exprs = [exprs] end rt = nil if !return_dtype.nil? rt = Utils.parse_into_datatype_expr(return_dtype)._rbdatatype_expr end rbexprs = Utils.parse_into_list_of_expressions(exprs) Utils.wrap_expr( Plr.reduce( _wrap_acc_lambda(function), rbexprs, returns_scalar, rt ) ) end |
#repeat(value, n, dtype: nil, eager: false) ⇒ Object
Repeat a single value n times.
39 40 41 42 43 44 45 46 47 48 49 50 |
# File 'lib/polars/functions/repeat.rb', line 39 def repeat(value, n, dtype: nil, eager: false) if n.is_a?(Integer) n = lit(n) end value = Utils.parse_into_expression(value, str_as_lit: true) expr = Utils.wrap_expr(Plr.repeat(value, n._rbexpr, dtype)) if eager return select(expr).to_series end expr end |
#rolling_corr(a, b, window_size:, min_samples: nil, ddof: 1) ⇒ Expr
Compute the rolling correlation between two columns/ expressions.
The window at a given row includes the row itself and the
window_size - 1 elements before it.
1829 1830 1831 1832 1833 1834 1835 1836 1837 1838 1839 1840 1841 1842 1843 1844 1845 1846 1847 1848 |
# File 'lib/polars/functions/lazy.rb', line 1829 def rolling_corr( a, b, window_size:, min_samples: nil, ddof: 1 ) if min_samples.nil? min_samples = window_size end if Utils.strlike?(a) a = F.col(a) end if Utils.strlike?(b) b = F.col(b) end Utils.wrap_expr( Plr.rolling_corr(a._rbexpr, b._rbexpr, window_size, min_samples, ddof) ) end |
#rolling_cov(a, b, window_size:, min_samples: nil, ddof: 1) ⇒ Expr
Compute the rolling covariance between two columns/ expressions.
The window at a given row includes the row itself and the
window_size - 1 elements before it.
1789 1790 1791 1792 1793 1794 1795 1796 1797 1798 1799 1800 1801 1802 1803 1804 1805 1806 1807 1808 |
# File 'lib/polars/functions/lazy.rb', line 1789 def rolling_cov( a, b, window_size:, min_samples: nil, ddof: 1 ) if min_samples.nil? min_samples = window_size end if Utils.strlike?(a) a = F.col(a) end if Utils.strlike?(b) b = F.col(b) end Utils.wrap_expr( Plr.rolling_cov(a._rbexpr, b._rbexpr, window_size, min_samples, ddof) ) end |
#select(*exprs, eager: true, **named_exprs) ⇒ DataFrame
Run polars expressions without a context.
This is syntactic sugar for running df.select on an empty DataFrame.
1603 1604 1605 1606 |
# File 'lib/polars/functions/lazy.rb', line 1603 def select(*exprs, eager: true, **named_exprs) empty_frame = eager ? Polars::DataFrame.new : Polars::LazyFrame.new empty_frame.select(*exprs, **named_exprs) end |
#self_dtype ⇒ DataTypeExpr
This functionality is considered unstable. It may be changed at any point without it being considered a breaking change.
Get the dtype of self in map_elements and map_batches.
28 29 30 |
# File 'lib/polars/functions/datatype.rb', line 28 def self_dtype DataTypeExpr._from_rbdatatype_expr(RbDataTypeExpr.self_dtype) end |
#set_random_seed(seed) ⇒ nil
Set the global random seed for Polars.
This random seed is used to determine things such as shuffle ordering.
12 13 14 |
# File 'lib/polars/functions/random.rb', line 12 def set_random_seed(seed) Plr.set_random_seed(seed) end |
#sql_expr(sql) ⇒ Expr
Parse one or more SQL expressions to polars expression(s).
1885 1886 1887 1888 1889 1890 1891 |
# File 'lib/polars/functions/lazy.rb', line 1885 def sql_expr(sql) if sql.is_a?(::String) Utils.wrap_expr(Plr.sql_expr(sql)) else sql.map { |q| Utils.wrap_expr(Plr.sql_expr(q)) } end end |
#std(column, ddof: 1) ⇒ Expr
Get the standard deviation.
This function is syntactic sugar for col(column).std(ddof: ddof).
216 217 218 |
# File 'lib/polars/functions/lazy.rb', line 216 def std(column, ddof: 1) col(column).std(ddof: ddof) end |
#struct(*exprs, schema: nil, eager: false, **named_exprs) ⇒ Object
Collect several columns into a Series of dtype Struct.
477 478 479 480 481 482 483 484 485 486 487 488 489 490 491 492 493 494 495 496 497 |
# File 'lib/polars/functions/as_datatype.rb', line 477 def struct(*exprs, schema: nil, eager: false, **named_exprs) rbexprs = Utils.parse_into_list_of_expressions(*exprs, **named_exprs) expr = Utils.wrap_expr(Plr.as_struct(rbexprs)) if !schema.nil? && !schema.empty? if !exprs.any? # no columns or expressions provided; create one from schema keys expr = Utils.wrap_expr( Plr.as_struct(Utils.parse_into_list_of_expressions(schema.keys)) ) expr = expr.cast(Struct.new(schema), strict: false) end end if eager Polars.select(expr).to_series else expr end end |
#sum(*names) ⇒ Expr
Sum all values.
Syntactic sugar for col(name).sum.
245 246 247 |
# File 'lib/polars/functions/aggregation/vertical.rb', line 245 def sum(*names) col(*names).sum end |
#sum_horizontal(*exprs, ignore_nulls: true) ⇒ Expr
Sum all values horizontally across columns.
172 173 174 175 |
# File 'lib/polars/functions/aggregation/horizontal.rb', line 172 def sum_horizontal(*exprs, ignore_nulls: true) rbexprs = Utils.parse_into_list_of_expressions(*exprs) Utils.wrap_expr(Plr.sum_horizontal(rbexprs, ignore_nulls)) end |
#tail(column, n = 10) ⇒ Expr
Get the last n rows.
This function is syntactic sugar for col(column).tail(n).
690 691 692 |
# File 'lib/polars/functions/lazy.rb', line 690 def tail(column, n = 10) col(column).tail(n) end |
#time(hour = nil, minute = nil, second = nil, microsecond = nil) ⇒ Expr
Create a Polars literal expression of type Time.
219 220 221 222 223 224 225 226 227 228 229 |
# File 'lib/polars/functions/as_datatype.rb', line 219 def time( hour = nil, minute = nil, second = nil, microsecond = nil ) epoch_start = [1970, 1, 1] datetime(*epoch_start, hour, minute, second, microsecond) .cast(Time) .alias("time") end |
#time_range(start = nil, stop = nil, interval = "1h", closed: "both", eager: false) ⇒ Object
Generate a time range.
35 36 37 38 39 40 41 42 43 44 45 46 47 48 49 50 51 52 53 54 55 56 57 58 59 60 61 62 63 64 65 66 67 68 69 |
# File 'lib/polars/functions/range/time_range.rb', line 35 def time_range( start = nil, stop = nil, interval = "1h", closed: "both", eager: false ) interval = Utils.parse_interval_argument(interval) ["y", "mo", "w", "d"].each do |unit| if interval.include?(unit) msg = "invalid interval unit for time_range: found #{unit.inspect}" raise ArgumentError, msg end end if start.nil? # date part is ignored start = ::Time.utc(2000, 1, 1, 0, 0, 0) end if stop.nil? # date part is ignored stop = ::Time.utc(2000, 1, 1, 23, 59, 59, 999999) end start_rbexpr = Utils.parse_into_expression(start) end_rbexpr = Utils.parse_into_expression(stop) result = Utils.wrap_expr(Plr.time_range(start_rbexpr, end_rbexpr, interval, closed)) if eager return Polars.select(result).to_series end result end |
#time_ranges(start = nil, stop = nil, interval = "1h", closed: "both", eager: false) ⇒ Object
Create a column of time ranges.
105 106 107 108 109 110 111 112 113 114 115 116 117 118 119 120 121 122 123 124 125 126 127 128 129 130 131 132 133 134 135 136 137 138 139 |
# File 'lib/polars/functions/range/time_range.rb', line 105 def time_ranges( start = nil, stop = nil, interval = "1h", closed: "both", eager: false ) interval = Utils.parse_interval_argument(interval) ["y", "mo", "w", "d"].each do |unit| if interval.include?(unit) msg = "invalid interval unit for time_range: found #{unit.inspect}" raise ArgumentError, msg end end if start.nil? # date part is ignored start = ::Time.utc(2000, 1, 1, 0, 0, 0) end if stop.nil? # date part is ignored stop = ::Time.utc(2000, 1, 1, 23, 59, 59, 999999) end start_rbexpr = Utils.parse_into_expression(start) end_rbexpr = Utils.parse_into_expression(stop) result = Utils.wrap_expr(Plr.time_ranges(start_rbexpr, end_rbexpr, interval, closed)) if eager return Polars.select(result).to_series end result end |
#union(items, how: "vertical", strict: false) ⇒ Object
This function does not guarantee any specific ordering of rows in the result.
If you need predictable row ordering, use Polars.concat instead.
Combine multiple DataFrames, LazyFrames, or Series into a single object.
303 304 305 306 307 308 309 310 311 312 313 314 315 316 317 318 319 320 321 322 323 324 325 326 327 328 329 330 331 332 333 334 335 336 337 338 339 340 341 342 343 344 345 346 347 348 349 350 351 352 353 354 355 356 357 358 359 360 361 362 363 364 365 366 367 368 369 370 371 372 373 374 375 376 377 378 379 380 381 382 383 384 385 386 387 388 389 390 391 392 393 394 395 396 397 398 399 400 |
# File 'lib/polars/functions/eager.rb', line 303 def union( items, how: "vertical", strict: false ) elems = items.to_a if elems.empty? msg = "cannot concat empty list" raise ArgumentError, msg elsif elems.length == 1 && (elems[0].is_a?(DataFrame) || elems[0].is_a?(Series) || elems[0].is_a?(LazyFrame)) return elems[0] end if how.start_with?("align") raise Todo end out = nil first = elems[0] if first.is_a?(DataFrame) if ["vertical", "vertical_relaxed"].include?(how) out = Utils.wrap_ldf( Plr.concat_lf( elems.map { |df| df.lazy }, false, true, how.end_with?("relaxed") ) ).collect(optimizations: QueryOptFlags._eager) elsif ["diagonal", "diagonal_relaxed"].include?(how) out = Utils.wrap_ldf( Plr.concat_lf_diagonal( elems.map { |df| df.lazy }, false, true, how.end_with?("relaxed") ) ).collect(optimizations: QueryOptFlags._eager) elsif how == "horizontal" out = Utils.wrap_df(Plr.concat_df_horizontal(elems, strict)) else raise Todo msg = "DataFrame `how` must be one of {{#{allowed}}}, got #{how.inspect}" raise ArgumentError, msg end elsif first.is_a?(LazyFrame) if ["vertical", "vertical_relaxed"].include?(how) return Utils.wrap_ldf( Plr.concat_lf( elems, false, true, how.end_with?("relaxed") ) ) elsif ["diagonal", "diagonal_relaxed"].include?(how) return Utils.wrap_ldf( Plr.concat_lf_diagonal( elems, false, true, how.end_with?("relaxed") ) ) elsif how == "horizontal" return Utils.wrap_ldf( Plr.concat_lf_horizontal( elems, true, strict ) ) else raise Todo msg = "LazyFrame `how` must be one of {{#{allowed}}}, got #{how.inspect}" raise ArgumentError, msg end elsif first.is_a?(Series) if how == "vertical" out = Utils.wrap_s(Plr.concat_series(elems)) else msg = "Series only supports 'vertical' concat strategy" raise ArgumentError, msg end elsif first.is_a?(Expr) return Utils.wrap_expr(Plr.concat_expr(elems.map { |e| e._rbexpr }, false)) else msg = "did not expect type: #{first.class.name.inspect} in `concat`" raise TypeError, msg end out end |
#using_string_cache ⇒ Boolean
Check whether the global string cache is enabled.
97 98 99 |
# File 'lib/polars/string_cache.rb', line 97 def using_string_cache Plr.using_string_cache end |
#var(column, ddof: 1) ⇒ Expr
Get the variance.
This function is syntactic sugar for col(column).var(ddof: ddof).
255 256 257 |
# File 'lib/polars/functions/lazy.rb', line 255 def var(column, ddof: 1) col(column).var(ddof: ddof) end |
#when(*predicates, **constraints) ⇒ When
Start a "when, then, otherwise" expression.
91 92 93 94 |
# File 'lib/polars/functions/whenthen.rb', line 91 def when(*predicates, **constraints) condition = Utils.parse_predicates_constraints_into_expression(*predicates, **constraints) When.new(Plr.when(condition)) end |
#zeros(n, dtype: Float64, eager: false) ⇒ Object
Construct a column of length n filled with zeros.
This is syntactic sugar for the repeat function.
109 110 111 112 113 114 115 116 |
# File 'lib/polars/functions/repeat.rb', line 109 def zeros(n, dtype: Float64, eager: false) if (zero = _one_or_zero_by_dtype(0, dtype)).nil? msg = "invalid dtype for `zeros`; found #{dtype}" raise TypeError, msg end repeat(zero, n, dtype: dtype, eager: eager).alias("zeros") end |