Commit 973b499 (1 parent: e7ca81e). Showing 3 changed files with 140 additions and 2 deletions.
@@ -0,0 +1,135 @@
from __future__ import annotations

from typing import TYPE_CHECKING

if TYPE_CHECKING:
    from pyspark.sql import Column
    from typing_extensions import Self

    from narwhals._spark_like.expr import SparkLikeExpr


class SparkLikeExprDateTimeNamespace:
    def __init__(self: Self, expr: SparkLikeExpr) -> None:
        self._compliant_expr = expr

    def date(self: Self) -> SparkLikeExpr:
        from pyspark.sql import functions as F  # noqa: N812

        return self._compliant_expr._from_call(
            F.to_date,
            "date",
            returns_scalar=self._compliant_expr._returns_scalar,
        )

    def year(self: Self) -> SparkLikeExpr:
        from pyspark.sql import functions as F  # noqa: N812

        return self._compliant_expr._from_call(
            F.year,
            "year",
            returns_scalar=self._compliant_expr._returns_scalar,
        )

    def month(self: Self) -> SparkLikeExpr:
        from pyspark.sql import functions as F  # noqa: N812

        return self._compliant_expr._from_call(
            F.month,
            "month",
            returns_scalar=self._compliant_expr._returns_scalar,
        )

    def day(self: Self) -> SparkLikeExpr:
        from pyspark.sql import functions as F  # noqa: N812

        return self._compliant_expr._from_call(
            F.dayofmonth,
            "day",
            returns_scalar=self._compliant_expr._returns_scalar,
        )

    def hour(self: Self) -> SparkLikeExpr:
        from pyspark.sql import functions as F  # noqa: N812

        return self._compliant_expr._from_call(
            F.hour,
            "hour",
            returns_scalar=self._compliant_expr._returns_scalar,
        )

    def minute(self: Self) -> SparkLikeExpr:
        from pyspark.sql import functions as F  # noqa: N812

        return self._compliant_expr._from_call(
            F.minute,
            "minute",
            returns_scalar=self._compliant_expr._returns_scalar,
        )

    def second(self: Self) -> SparkLikeExpr:
        from pyspark.sql import functions as F  # noqa: N812

        return self._compliant_expr._from_call(
            F.second,
            "second",
            returns_scalar=self._compliant_expr._returns_scalar,
        )

    def millisecond(self: Self) -> SparkLikeExpr:
        from pyspark.sql import functions as F  # noqa: N812

        def _millisecond(_input: Column) -> Column:
            # unix_micros gives microseconds since the Unix epoch; keep the
            # sub-second part and scale it down to milliseconds.
            return F.floor((F.unix_micros(_input) % 1_000_000) / 1000)

        return self._compliant_expr._from_call(
            _millisecond,
            "millisecond",
            returns_scalar=self._compliant_expr._returns_scalar,
        )

    def microsecond(self: Self) -> SparkLikeExpr:
        from pyspark.sql import functions as F  # noqa: N812

        def _microsecond(_input: Column) -> Column:
            # Sub-second part of the timestamp, in microseconds.
            return F.unix_micros(_input) % 1_000_000

        return self._compliant_expr._from_call(
            _microsecond,
            "microsecond",
            returns_scalar=self._compliant_expr._returns_scalar,
        )

    def nanosecond(self: Self) -> SparkLikeExpr:
        from pyspark.sql import functions as F  # noqa: N812

        def _nanosecond(_input: Column) -> Column:
            # Spark timestamps have microsecond precision, so this is the
            # sub-second part in microseconds scaled up by 1000.
            return (F.unix_micros(_input) % 1_000_000) * 1000

        return self._compliant_expr._from_call(
            _nanosecond,
            "nanosecond",
            returns_scalar=self._compliant_expr._returns_scalar,
        )

    def ordinal_day(self: Self) -> SparkLikeExpr:
        from pyspark.sql import functions as F  # noqa: N812

        return self._compliant_expr._from_call(
            F.dayofyear,
            "ordinal_day",
            returns_scalar=self._compliant_expr._returns_scalar,
        )

    def weekday(self: Self) -> SparkLikeExpr:
        from pyspark.sql import functions as F  # noqa: N812

        def _weekday(_input: Column) -> Column:
            # PySpark's dayofweek returns 1-7 for Sunday-Saturday; shifting by 6
            # modulo 7 maps Monday-Saturday to 1-6 and Sunday to 0.
            return (F.dayofweek(_input) + 6) % 7

        return self._compliant_expr._from_call(
            _weekday,
            "weekday",
            returns_scalar=self._compliant_expr._returns_scalar,
        )
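For context, here is a minimal sketch of how these methods might be exercised end to end through narwhals' public expression API. It assumes a running SparkSession, a narwhals version in which the PySpark backend is reachable via the usual from_native / select / to_native entry points, and a made-up column name ts with a hypothetical literal timestamp; it is an illustration, not part of the commit.

import narwhals as nw
from pyspark.sql import SparkSession

spark = SparkSession.builder.getOrCreate()

# Build a one-row PySpark DataFrame with a proper TIMESTAMP column named "ts".
native_df = spark.createDataFrame(
    [("2024-06-30 12:34:56.789012",)], ["ts_str"]
).selectExpr("CAST(ts_str AS TIMESTAMP) AS ts")

# Wrap it in narwhals and use the dt namespace implemented above.
df = nw.from_native(native_df)
result = df.select(
    nw.col("ts").dt.year().alias("year"),                # 2024
    nw.col("ts").dt.ordinal_day().alias("ordinal_day"),  # 182
    nw.col("ts").dt.millisecond().alias("millisecond"),  # 789
    nw.col("ts").dt.microsecond().alias("microsecond"),  # 789012
)
result.to_native().show()

The expected values in the comments follow directly from the arithmetic above: the sub-second part of the timestamp is 789012 microseconds, which floors to 789 milliseconds, and June 30 is the 182nd day of the leap year 2024.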