Skip to content

Commit

Permalink
Issue #235 Fix Documentation not showing up on the website (#343)
Browse files Browse the repository at this point in the history
  • Loading branch information
Amalicia authored Apr 25, 2023
1 parent 5fe194b commit 1deb9be
Show file tree
Hide file tree
Showing 23 changed files with 36 additions and 32 deletions.
2 changes: 1 addition & 1 deletion core/src/main/scala/doric/sem/JoinOps.scala
Original file line number Diff line number Diff line change
Expand Up @@ -5,7 +5,7 @@ import doric.{DoricColumn, DoricJoinColumn}

import org.apache.spark.sql.{DataFrame, Dataset}

private[sem] trait JoinOps {
protected trait JoinOps {
implicit class DataframeJoinSyntax[A](df: Dataset[A]) {

/**
Expand Down
2 changes: 1 addition & 1 deletion core/src/main/scala/doric/syntax/AggregationColumns.scala
Original file line number Diff line number Diff line change
Expand Up @@ -8,7 +8,7 @@ import doric.Doric
import org.apache.spark.sql.{Column, functions => f}
import org.apache.spark.sql.catalyst.expressions.aggregate.Sum

private[syntax] trait AggregationColumns {
protected trait AggregationColumns {

/**
* Aggregate function: returns the sum of all values in the expression.
Expand Down
22 changes: 13 additions & 9 deletions core/src/main/scala/doric/syntax/ArrayColumns.scala
Original file line number Diff line number Diff line change
Expand Up @@ -33,7 +33,7 @@ protected final case class Zipper[T1, T2, F[_]: CollectionType](
}
}

private[syntax] trait ArrayColumns {
protected trait ArrayColumns {

/**
* Concatenates multiple array columns together into a single column.
Expand Down Expand Up @@ -460,7 +460,8 @@ private[syntax] trait ArrayColumns {
* }}}
*
* @group Array Type
* @see [[org.apache.spark.sql.functions.exists]]
* @see org.apache.spark.sql.functions.exists
* @todo scaladoc link not available for spark 2.4
*/
def exists(fun: DoricColumn[T] => BooleanColumn): BooleanColumn = {
val xv = x(col.getIndex(0))
Expand Down Expand Up @@ -580,7 +581,8 @@ private[syntax] trait ArrayColumns {
* @note
* if `start` == 0 an exception will be thrown
* @group Array Type
* @see [[org.apache.spark.sql.functions.slice(x:org\.apache\.spark\.sql\.Column,start:org\.apache\.spark\.sql\.Column,length* org.apache.spark.sql.functions.slice]]
* @see org.apache.spark.sql.functions.slice
* @todo scaladoc link (issue #135)
*/
def slice(start: IntegerColumn, length: IntegerColumn): DoricColumn[F[T]] =
(col.elem, start.elem, length.elem)
Expand All @@ -606,12 +608,14 @@ private[syntax] trait ArrayColumns {
* Merge two given arrays, element-wise, into a single array using a function.
* If one array is shorter, nulls are appended at the end to match the length of the longer
* array, before applying the function.
*
* @example {{{
* df.select(colArray("val1").zipWith(col("val2"), concat(_, _)))
* }}}
*
* @group Array Type
* @see [[org.apache.spark.sql.functions.zip_with]]
* @see org.apache.spark.sql.functions.zip_with
* @todo scaladoc link not available for spark 2.4
*/
def zipWith[T2](
col2: DoricColumn[F[T2]]
Expand All @@ -638,8 +642,8 @@ private[syntax] trait ArrayColumns {
* The array in the first column is used for keys.
* The array in the second column is used for values.
*
* @throws RuntimeException if arrays doesn't have the same length.
* @throws RuntimeException if a key is null
* @throws java.lang.RuntimeException if the arrays don't have the same length
* or if a key is null
*
* @group Array Type
* @see [[org.apache.spark.sql.functions.map_from_arrays]]
Expand All @@ -652,8 +656,8 @@ private[syntax] trait ArrayColumns {
* The array in the first column is used for keys.
* The array in the second column is used for values.
*
* @throws RuntimeException if arrays doesn't have the same length.
* @throws RuntimeException if a key is null
* @throws java.lang.RuntimeException if the arrays don't have the same length
* or if a key is null
*
* @group Array Type
* @see [[mapFromArrays]]
Expand All @@ -663,7 +667,7 @@ private[syntax] trait ArrayColumns {

/**
* Converts a column containing a StructType into a JSON string with the specified schema.
* @throws IllegalArgumentException in the case of an unsupported type.
* @throws java.lang.IllegalArgumentException in the case of an unsupported type.
*
* @group Array Type
* @see org.apache.spark.sql.functions.to_json(e:org\.apache\.spark\.sql\.Column,options:scala\.collection\.immutable\.Map\[java\.lang\.String,java\.lang\.String\]):* org.apache.spark.sql.functions.to_json
Expand Down
2 changes: 1 addition & 1 deletion core/src/main/scala/doric/syntax/BinaryColumns.scala
Original file line number Diff line number Diff line change
Expand Up @@ -6,7 +6,7 @@ import doric.types.{BinaryType, SparkType}

import org.apache.spark.sql.{functions => f}

private[syntax] trait BinaryColumns {
protected trait BinaryColumns {

/**
* Concatenates multiple binary columns together into a single column.
Expand Down
2 changes: 1 addition & 1 deletion core/src/main/scala/doric/syntax/BooleanColumns.scala
Original file line number Diff line number Diff line change
Expand Up @@ -5,7 +5,7 @@ import doric.DoricColumn.sparkFunction

import org.apache.spark.sql.{functions => f}

private[syntax] trait BooleanColumns {
protected trait BooleanColumns {

/**
* Inversion of boolean expression, i.e. NOT.
Expand Down
2 changes: 1 addition & 1 deletion core/src/main/scala/doric/syntax/CommonColumns.scala
Original file line number Diff line number Diff line change
Expand Up @@ -8,7 +8,7 @@ import doric.types.{Casting, SparkType, UnsafeCasting}
import org.apache.spark.sql.{Column, functions => f}
import org.apache.spark.sql.catalyst.expressions.ArrayRepeat

private[syntax] trait CommonColumns extends ColGetters[NamedDoricColumn] {
protected trait CommonColumns extends ColGetters[NamedDoricColumn] {

/**
* Returns the first column that is not null, or null if all inputs are null.
Expand Down
2 changes: 1 addition & 1 deletion core/src/main/scala/doric/syntax/ControlStructures.scala
Original file line number Diff line number Diff line change
Expand Up @@ -3,7 +3,7 @@ package syntax

import doric.types.SparkType

private[syntax] trait ControlStructures {
protected trait ControlStructures {

/**
* Initialize a when builder
Expand Down
2 changes: 1 addition & 1 deletion core/src/main/scala/doric/syntax/DStructs.scala
Original file line number Diff line number Diff line change
Expand Up @@ -14,7 +14,7 @@ import shapeless.{::, HList, LabelledGeneric, Witness}
import scala.jdk.CollectionConverters._
import scala.language.dynamics

private[syntax] trait DStructs {
protected trait DStructs {

/**
* Creates a struct with the columns
Expand Down
2 changes: 1 addition & 1 deletion core/src/main/scala/doric/syntax/DateColumns.scala
Original file line number Diff line number Diff line change
Expand Up @@ -9,7 +9,7 @@ import java.sql.Date
import org.apache.spark.sql.{Column, functions => f}
import org.apache.spark.sql.catalyst.expressions.{AddMonths, CurrentDate, DateAdd, DateFormatClass, DateSub, MonthsBetween, NextDay, TruncDate, TruncTimestamp}

private[syntax] trait DateColumns {
protected trait DateColumns {

/**
* Returns the current date at the start of query evaluation as a date column.
Expand Down
2 changes: 1 addition & 1 deletion core/src/main/scala/doric/syntax/LiteralConversions.scala
Original file line number Diff line number Diff line change
Expand Up @@ -5,7 +5,7 @@ import doric.sem.Location
import doric.types.SparkType.Primitive
import doric.types.{LiteralSparkType, SparkType}

private[syntax] trait LiteralConversions {
protected trait LiteralConversions {

/**
* Creates a literal with the provided value.
Expand Down
2 changes: 1 addition & 1 deletion core/src/main/scala/doric/syntax/MapColumns.scala
Original file line number Diff line number Diff line change
Expand Up @@ -8,7 +8,7 @@ import org.apache.spark.sql.{Column, Row, functions => f}

import scala.jdk.CollectionConverters._

private[syntax] trait MapColumns {
protected trait MapColumns {

/**
* Returns the union of all the given maps.
Expand Down
2 changes: 1 addition & 1 deletion core/src/main/scala/doric/syntax/NumericColumns.scala
Original file line number Diff line number Diff line change
Expand Up @@ -7,7 +7,7 @@ import doric.types.{CollectionType, NumericType}
import org.apache.spark.sql.catalyst.expressions.{BRound, FormatNumber, FromUnixTime, Rand, Randn, Round, UnaryMinus}
import org.apache.spark.sql.{Column, functions => f}

private[syntax] trait NumericColumns {
protected trait NumericColumns {

/**
* Returns the current Unix timestamp (in seconds) as a long.
Expand Down
2 changes: 1 addition & 1 deletion core/src/main/scala/doric/syntax/StringColumns.scala
Original file line number Diff line number Diff line change
Expand Up @@ -8,7 +8,7 @@ import org.apache.spark.sql.{Column, functions => f}

import scala.jdk.CollectionConverters._

private[syntax] trait StringColumns {
protected trait StringColumns {

/**
* Concatenate string columns to form a single one
Expand Down
2 changes: 1 addition & 1 deletion core/src/main/scala/doric/syntax/TimestampColumns.scala
Original file line number Diff line number Diff line change
Expand Up @@ -7,7 +7,7 @@ import doric.types.{SparkType, TimestampType}
import org.apache.spark.sql.{Column, functions => f}
import org.apache.spark.sql.catalyst.expressions.{FromUTCTimestamp, ToUTCTimestamp}

private[syntax] trait TimestampColumns {
protected trait TimestampColumns {

/**
* Returns the current timestamp at the start of query evaluation as a timestamp column.
Expand Down
4 changes: 2 additions & 2 deletions core/src/main/scala/doric/syntax/package.scala
Original file line number Diff line number Diff line change
Expand Up @@ -20,7 +20,7 @@ package object syntax {
* @tparam K type of "key" doric column to perform the search
* @tparam V type of "value" doric column result
*/
@inline private[syntax] def elementAtAbstract[T, K, V](
@inline def elementAtAbstract[T, K, V](
dc: DoricColumn[T],
key: DoricColumn[K]
): DoricColumn[V] = {
Expand All @@ -39,7 +39,7 @@ package object syntax {
* @param dc doric column to be reversed
* @tparam T type of doric column (string or array)
*/
@inline private[syntax] def reverseAbstract[T](
@inline def reverseAbstract[T](
dc: DoricColumn[T]
): DoricColumn[T] =
dc.elem.map(f.reverse).toDC
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -7,7 +7,7 @@ import doric.types.{BinaryType, SparkType}
import org.apache.spark.sql.catalyst.expressions.Decode
import org.apache.spark.sql.Column

private[syntax] trait BinaryColumns30_31 {
protected trait BinaryColumns30_31 {

implicit class BinaryOperationsSyntax30_31[T: BinaryType: SparkType](
column: DoricColumn[T]
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -6,7 +6,7 @@ import org.apache.spark.sql.Column
import org.apache.spark.sql.{functions => f}
import org.apache.spark.sql.catalyst.expressions.{ShiftLeft, ShiftRight, ShiftRightUnsigned}

private[syntax] trait NumericColumns2_31 {
protected trait NumericColumns2_31 {

/**
* INTEGRAL OPERATIONS
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -5,7 +5,7 @@ import doric.types.DoubleC

import org.apache.spark.sql.{functions => f}

private[syntax] trait AggregationColumns31 {
protected trait AggregationColumns31 {

/**
* Aggregate function: returns the approximate `percentile` of the numeric column `col` which
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -5,7 +5,7 @@ import cats.implicits._

import org.apache.spark.sql.{functions => f}

private[syntax] trait BooleanColumns31 {
protected trait BooleanColumns31 {

/**
* @group Boolean Type
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -4,7 +4,7 @@ package syntax
import doric.types.NumericType
import org.apache.spark.sql.{functions => f}

private[syntax] trait NumericColumns31 {
protected trait NumericColumns31 {
implicit class NumericOperationsSyntax31[T: NumericType](
column: DoricColumn[T]
) {
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -4,7 +4,7 @@ package syntax
import doric.sem.Location
import org.apache.spark.sql.{functions => f}

private[syntax] trait StringColumns31 {
protected trait StringColumns31 {

/**
* Throws an exception with the provided error message.
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -7,7 +7,7 @@ import doric.types.{BinaryType, SparkType}
import org.apache.spark.sql.Column
import org.apache.spark.sql.catalyst.expressions.StringDecode

private[syntax] trait BinaryColumns32 {
protected trait BinaryColumns32 {

implicit class BinaryOperationsSyntax32[T: BinaryType: SparkType](
column: DoricColumn[T]
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -6,7 +6,7 @@ import org.apache.spark.sql.Column
import org.apache.spark.sql.{functions => f}
import org.apache.spark.sql.catalyst.expressions.{ShiftLeft, ShiftRight, ShiftRightUnsigned}

private[syntax] trait NumericColumns32 {
protected trait NumericColumns32 {

/**
* INTEGRAL OPERATIONS
Expand Down

0 comments on commit 1deb9be

Please sign in to comment.