Skip to content

Commit

Permalink
Renamed the `total_kp` column to `no_of_karma_points` in the user report and data warehouse models
Browse files Browse the repository at this point in the history
  • Loading branch information
varshamahuli97 committed May 3, 2024
1 parent 1f53f22 commit b51517b
Show file tree
Hide file tree
Showing 2 changed files with 9 additions and 2 deletions.
Original file line number Diff line number Diff line change
Expand Up @@ -81,7 +81,7 @@ object UserReportModel extends AbsDashboardModel {
col("userID").alias("user_id"),
col("userOrgID").alias("mdo_id"),
col("userStatus").alias("status"),
col("total_points").alias("total_kp"),
col("total_points").alias("no_of_karma_points"),
col("fullName").alias("full_name"),
col("professionalDetails.designation").alias("designation"),
col("personalDetails.primaryEmail").alias("email"),
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -4,6 +4,7 @@ import org.apache.spark.SparkContext
import org.apache.spark.sql.SparkSession
import org.apache.spark.sql.functions._
import org.ekstep.analytics.dashboard.DashboardUtil._
import org.ekstep.analytics.dashboard.DataUtil._
import org.ekstep.analytics.dashboard.{AbsDashboardModel, DashboardConfig}
import org.ekstep.analytics.framework.FrameworkContext

Expand All @@ -26,7 +27,7 @@ object DataWarehouseModel extends AbsDashboardModel {
var user_details = spark.read.option("header", "true")
.csv(s"${conf.localReportDir}/${conf.userReportPath}/${today}-warehouse")
user_details = user_details.withColumn("status", col("status").cast("int"))
.withColumn("total_kp", col("total_kp").cast("int"))
.withColumn("no_of_karma_points", col("no_of_karma_points").cast("int"))
truncateWarehouseTable(conf.dwUserTable)
saveDataframeToPostgresTable_With_Append(user_details, dwPostgresUrl, conf.dwUserTable, conf.dwPostgresUsername, conf.dwPostgresCredential)

Expand Down Expand Up @@ -99,6 +100,12 @@ object DataWarehouseModel extends AbsDashboardModel {

truncateWarehouseTable(conf.dwOrgTable)
saveDataframeToPostgresTable_With_Append(orgDwDf, dwPostgresUrl, conf.dwOrgTable, conf.dwPostgresUsername, conf.dwPostgresCredential)

var content_resource_details = spark.read.option("header", "true")
.csv(s"${conf.localReportDir}/${conf.courseReportPath}/${today}-resource-warehouse")

truncateWarehouseTable(conf.dwCourseResourceTable)
saveDataframeToPostgresTable_With_Append(content_resource_details, dwPostgresUrl, conf.dwCourseResourceTable, conf.dwPostgresUsername, conf.dwPostgresCredential)
}

}
Expand Down

0 comments on commit b51517b

Please sign in to comment.