Commit 783fe3a33d5a3b22595231d4812b96eefbf76932

Authored by Joanne
1 parent d74fa3aabe
Exists in master

temp insert

Showing 1 changed file with 2 additions and 2 deletions

app/com/piki_ds/ver2ggh/gghScore.scala @ 783fe3a
/**
 * Created by Evan on 2016. 5. 18..
 */

package com.piki_ds.ver2ggh

import com.piki_ds.utils.hbase.HbaseInserter
import com.piki_ds.ver2ggh
import com.piki_ds.data.contents.report.Util
import java.util.Date
import org.apache.hadoop.fs.Path
import org.apache.spark.SparkContext
import org.apache.spark.mllib.linalg.Vectors
import org.apache.spark.mllib.regression.LabeledPoint
import org.apache.spark.mllib.tree.model.RandomForestModel
import org.apache.spark.rdd.RDD
import org.apache.spark.sql.{DataFrame, SaveMode, SQLContext}
import org.apache.spark.sql.functions._

object gghScore {

  val sc = SparkContext.getOrCreate()
  val sqlContext = SQLContext.getOrCreate(sc)
  val hadoopConf = new org.apache.hadoop.conf.Configuration()
  val hdfs = org.apache.hadoop.fs.FileSystem.get(new java.net.URI("hdfs://pikinn"), hadoopConf)
  import sqlContext.implicits._

  def main(args: Array[String]): Unit = {

    val format = new java.text.SimpleDateFormat("yyyyMMdd")
    val currentTime = new Date()

    val day_delm = 24 * 1000 * 60 * 60L
    val saveDay = if (args.length >= 1) args(0) else format.format(currentTime.getTime - day_delm)
    val ind = -6 to 0
    val dateSet = ind.map(x => {
      format.format(format.parse(saveDay).getTime + day_delm * x)
    })
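    // e.g. with saveDay = "20160518" this yields the 7-day window
    // Vector("20160512", "20160513", ..., "20160517", "20160518")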

    val hadoopConf = sc.hadoopConfiguration
    val fs = org.apache.hadoop.fs.FileSystem.get(hadoopConf)

    dateSet.map { x =>
      val addrs = s"hdfs://pikinn/user/evan/Features/table=expConTime/dt=${x}"
      val out = if (!fs.exists(new Path(addrs))) x else "null"
      out
    }.filter(x => x != "null").map(x => ver2ggh.expConTime.main(Array(s"${x}")))
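    // Backfill: any date whose parquet partition is missing on HDFS has its
    // expConTime job run here to generate it before the reads below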

    val expConsume = dateSet.map { x =>
      val expConsumeOut = sqlContext.read.parquet(s"hdfs://pikinn/user/evan/Features/table=expConTime/dt=${x}").drop("table").drop("dt").
        where("fromKey = 'm' or (fromKey = 'h' and position = 0)")
      expConsumeOut
    }.reduce((x, y) => x.unionAll(y))
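    // reduce + unionAll stacks the seven daily DataFrames into one
    // (unionAll is the Spark 1.x name; Spark 2.x renamed it to union)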

    /// Build card-count groups per content, bucketed in steps of 5
    val cardSize = Util.tables("MG_CARD").where("status='ACTV'").groupBy("contents_id").agg(expr("count(ordering) as cardSize")).
      map(x => (x.getAs[Long]("contents_id"), x.getAs[Long]("cardSize"), x.getAs[Long]("cardSize")/5)).toDF("cid1","cardSize","sizeGroup").
      selectExpr("*","if(sizeGroup>19, 20, sizeGroup) as cardGroup").drop("sizeGroup")
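    // e.g. cardSize 23 -> 23/5 = 4 (integer division) -> cardGroup 4;
    // contents with 100+ cards (sizeGroup > 19) are capped at cardGroup 20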
    /// Average content consumption time per card group, computed from the kpi3 metric
    //val kpi3 = Util.readDashboardTable("kpi3","*","{*}","*", "*").selectExpr("cid","udate","appView","consumeTime","numberOfCard").dropDuplicates(Seq("cid")).
    //  join(cardSize, column("cid")===cardSize("cid1")).drop(cardSize("cid1")).where("numberOfCard is not null").cache
    //val cardGroupConsume = kpi3.where("consumeTime > 10.0").groupBy("cardGroup").agg(expr("avg(consumeTime) * 1000 as cardGroupConTime")).selectExpr("cardGroup as cardGroup1", "cardGroupConTime")
    // kpi3.map(x => s"${x(0)}|${x(1)}|${x(2)}|${x(3)}|${x(4)}|${x(5)}|${x(6)}").coalesce(1, shuffle = true).saveAsTextFile(s"hdfs://pikinn/user/evan/Features/kpi3")
    // cardGroupConsume.stat.corr("cardGroup1","cardGroupConTime")  // correlation between cardGroup and mean consumption time: 0.89

    ///
    val cardtype = Util.tables("MG_CARD").where("status='ACTV'").selectExpr("contents_id as cid","card_id as card_id",
      "if(card_type = 'DYNAMIC', 'PHOTO', if(card_type like '%SNS%', 'SNS', card_type)) as card_type")

    val cidCardType = cardtype.groupBy("cid","card_type").agg(expr("count(*) as count")).select(
      expr("cid"),
      expr("case when card_type = 'LANDING' then count else 0 end as LANDING"),
      expr("case when card_type = 'SHOPPING' then count else 0 end as SHOPPING"),
      expr("case when card_type = 'PHOTO' then count else 0 end as PHOTO"),
      expr("case when card_type = 'SNS' then count else 0 end as SNS"),
      expr("case when card_type = 'PANORAMA' then count else 0 end as PANORAMA"),
      expr("case when card_type = 'TEXT' then count else 0 end as TEXT"),
      expr("case when card_type = 'YOUTUBE' then count else 0 end as YOUTUBE"),
      expr("case when card_type = 'INTR' then count else 0 end as INTR"),
      expr("case when card_type = 'VIDEO' then count else 0 end as VIDEO")
    ).groupBy("cid").agg(expr("sum(LANDING) as LANDING"), expr("sum(SHOPPING) as SHOPPING"), expr("sum(PHOTO) as PHOTO"),
      expr("sum(SNS) as SNS"), expr("sum(PANORAMA) as PANORAMA"), expr("sum(TEXT) as TEXT"), expr("sum(YOUTUBE) as YOUTUBE"),
      expr("sum(INTR) as INTR"), expr("sum(VIDEO) as VIDEO"))
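    // The case-when/sum pair above is a manual pivot: the per-(cid, card_type)
    // counts are spread into one column per type, then collapsed back to a
    // single row per cid holding that content's card-type count vector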


    val contentsType = Util.tables("MG_CONTENTS").where("status='ACTV'").select(
      expr("contents_id as cid"),
      expr("case when contents_type = 'ALBUM' then 1 else 0 end as ALBUM"),
      expr("case when contents_type = 'ALBUM.A' then 1 else 0 end as ALBUM_A"),
      expr("case when contents_type = 'CHST' then 1 else 0 end as CHST"),
      expr("case when contents_type = 'CHST.A' then 1 else 0 end as CHST_A"),
      expr("case when contents_type = 'TOON' then 1 else 0 end as TOON"),
      expr("case when contents_type = 'LIVE' then 1 else 0 end as LIVE")
    )

    val cidCardTypeSize = cidCardType.join(cardSize, cidCardType("cid")===cardSize("cid1"), "leftouter").drop(cardSize("cid1")).drop(cardSize("cardGroup")).
      join(contentsType, cidCardType("cid")===contentsType("cid")).drop(contentsType("cid"))

    val predData = cidCardTypeSize.map { line =>
      LabeledPoint(line.getAs[Long]("cid"), Vectors.dense(line.getAs[Long]("cardSize").toDouble, line.getAs[Long]("LANDING").toDouble,
        line.getAs[Long]("SHOPPING").toDouble, line.getAs[Long]("PHOTO").toDouble, line.getAs[Long]("SNS").toDouble, line.getAs[Long]("PANORAMA").toDouble,
        line.getAs[Long]("TEXT").toDouble, line.getAs[Long]("YOUTUBE").toDouble, line.getAs[Long]("INTR").toDouble, line.getAs[Long]("VIDEO").toDouble,
        line.getAs[Int]("ALBUM").toDouble, line.getAs[Int]("ALBUM_A").toDouble, line.getAs[Int]("CHST").toDouble, line.getAs[Int]("CHST_A").toDouble,
        line.getAs[Int]("TOON").toDouble, line.getAs[Int]("LIVE").toDouble
      ))
    }
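    // Note the LabeledPoint label carries the cid rather than a target value:
    // the model is only used for prediction here, and keeping the cid in the
    // label lets each prediction be keyed back to its content below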

    val RFmodel = RandomForestModel.load(sc, s"hdfs://pikinn/user/evan/Features/cardTypeConsume/RFModel")
    val predResult = predData.collect.map { point =>
      val prediction = RFmodel.predict(point.features)
      (point.label.toLong, prediction)
    }

    val cidPredConsume = sc.parallelize(predResult).toDF("cid1","predConsume").withColumn("predConsume", column("predConsume")*1000)
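    // The pre-trained random forest estimates an expected consumption time per
    // content from its card mix; the *1000 appears to put it on the same
    // millisecond scale as cardGroupConTime in the commented-out kpi3 code above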

    /// Average exposure time until a content consumption occurs, by exposure position
    val posExpTime = expConsume.groupBy("fromKey","position").agg(expr("sum(expTime2)/sum(expSize2) as posExpTime")).
      selectExpr("fromKey as fromKey1", "position as position1", "posExpTime")

    /// CTR by position
    val positionCtr = expConsume.groupBy("fromKey","position").agg(expr("sum(expSize2) as expSize"), expr("count(consume) as consumeCnt")).
      withColumn("rankingCtr", column("consumeCnt")/column("expSize")).selectExpr("fromKey as fromKey1","position as position1","rankingCtr")
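    // rankingCtr = consumeCnt / expSize: the empirical rate at which an exposure
    // at (fromKey, position) turns into a consumption. count(consume) counts only
    // non-null consume rows, i.e. exposures that were actually consumed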

    /// Exposure time and consumption time by content and exposure position
    val cidPositionInfo = expConsume.groupBy("cid","fromKey","position").
      agg(//expr("sum(expTime1) as expTime1"), expr("count(expSize1) as expSize1"),
        expr("sum(expTime2) as expTime2"), expr("sum(expSize2) as expSize2"), expr("sum(consume) as consume"), expr("count(consume) as conCount")).
      join(positionCtr, column("fromKey")===positionCtr("fromKey1") && column("position")===positionCtr("position1"), "leftouter").
      drop(positionCtr("fromKey1")).drop(positionCtr("position1")).
      join(cardSize, column("cid")===cardSize("cid1"), "leftouter").drop(cardSize("cid1")).na.fill(0, Seq("consume")).
      where("expSize2 > 200 and cardSize is not null")

    val gghtmp = cidPositionInfo.join(posExpTime, cidPositionInfo("fromKey")===posExpTime("fromKey1") && cidPositionInfo("position")===posExpTime("position1"), "leftouter").
      drop(posExpTime("fromKey1")).drop(posExpTime("position1")).
      join(cidPredConsume, cidPositionInfo("cid")===cidPredConsume("cid1"), "leftouter").drop(cidPredConsume("cid1"))
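    // gghtmp now holds, per (cid, fromKey, position), the observed exposure and
    // consumption totals alongside three baselines: rankingCtr (position CTR),
    // posExpTime (average exposure time at that slot) and predConsume
    // (model-predicted consumption time for the content)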

    /*
    val gghBase3 = gghtmp.selectExpr("*", "consume/(expSize2*rankingCtr*predConsume) as consumeEff", "expTime2/(expSize2*posExpTime) as expEff").
      withColumn("ggh", column("consumeEff")/column("expEff"))
    val gghVer2 = gghBase3.groupBy("cid").agg(expr("sum(expSize2) as totalExpSize2"), expr("sum(consumeEff)/count(*) as consumeEff"),
      expr("sum(expEff)/count(*) as expEff")).withColumn("ggh", column("consumeEff")/column("expEff")).where("totalExpSize2 > 1000")
    */

    val gghBase4 = gghtmp.selectExpr("*", "expSize2*rankingCtr as expectConCnt", "expTime2/(expSize2*posExpTime) as expEff")

    val gghVer3 = gghBase4.groupBy("cid").agg(expr("sum(expSize2) as totalExpSize2"), expr("sum(consume) as consume"),
      expr("sum(expectConCnt) as expectConCnt"), expr("sum(expEff)/count(*) as expEff")).
      join(cidPredConsume, cidPositionInfo("cid")===cidPredConsume("cid1"), "leftouter").drop(cidPredConsume("cid1")).
      selectExpr("cid","totalExpSize2","consume/(expectConCnt * predConsume) as consumeEff", "expEff").withColumn("ggh", column("consumeEff")/column("expEff")).
      where("totalExpSize2 > 1000")
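    // In formula form:
    //   expectConCnt = expSize2 * rankingCtr                    (consumptions expected at this slot)
    //   consumeEff   = consume / (expectConCnt * predConsume)   (actual vs expected consumption time)
    //   expEff       = expTime2 / (expSize2 * posExpTime)       (actual vs expected exposure time)
    //   ggh          = consumeEff / expEff
    // so, roughly, ggh > 1 means the content converts exposure into consumption
    // better than its positions and card mix alone would predict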

    val gghMean = gghVer3.describe().where("summary = 'mean'").drop("summary").take(1)(0)(4).toString.toDouble
    val gghStd = gghVer3.describe().where("summary = 'stddev'").drop("summary").take(1)(0)(4).toString.toDouble

    val gghScaled = gghVer3.withColumn("gghScaled", (column("ggh") - gghMean) / gghStd).selectExpr("*", "1000 / (1 + exp(-gghScaled)) as scaledGgh").drop("gghScaled")
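    // Standardize ggh to a z-score, then squash it through a logistic curve into
    // (0, 1000): a content exactly at the mean scores 500, and +/-2 stddev lands
    // near 881 / 119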
    /*
    gghScaled.map{x =>
      s"${x(0)},${x.getAs[Double]("scaledGgh").toInt}"
    }.saveAsTextFile(s"hdfs://pikinn/preprocess/timelineScore/content/ggh/$saveDay")
    */
    val finalScore = gghScaled.map(x => (x(0), x.getAs[Double]("scaledGgh").toInt))
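
For reference, a minimal self-contained sketch of the final scaling step, runnable without Spark. The object name and the mean/stddev values are illustrative only, not taken from the job:

object GghScaleSketch {
  // Same transform as gghScaled above: z-score, then logistic squash into (0, 1000)
  def scaledGgh(ggh: Double, mean: Double, std: Double): Double =
    1000.0 / (1.0 + math.exp(-((ggh - mean) / std)))

  def main(args: Array[String]): Unit = {
    val (mean, std) = (1.0, 0.5) // hypothetical corpus statistics
    println(scaledGgh(1.0, mean, std)) // 500.0: ggh at the mean maps to mid-scale
    println(scaledGgh(2.0, mean, std)) // ~880.8: two stddevs above the mean
  }
}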