Commit 822af0d1dc12fa9264ca6a5e6e896a8c42ed84f2

Authored by joel
0 parents
Exists in master

Showing 5 changed files with 457 additions and 0 deletions

app/com/piki_ds/utils/hbase/PikiHbase.scala
@@ -0,0 +1,257 @@
package com.piki_ds.utils.hbase

import com.piki.ds.utils.Base62
import org.apache.hadoop.hbase.{HColumnDescriptor, TableName, HTableDescriptor}
import org.apache.hadoop.hbase.client._
import org.apache.hadoop.hbase.filter.PrefixFilter
import org.apache.hadoop.hbase.util.Bytes
import org.slf4j.LoggerFactory

import scala.util.Random

/**
 * Created by pikicast on 2015. 7. 30..
 */
object PikiHbase {
  val logger = LoggerFactory.getLogger(getClass)
  val log_err = LoggerFactory.getLogger("Global")
  val conf = PikiHbaseConfig.conf

  def getConnection: HConnection = {
    HConnectionManager.createConnection(conf)
  }

  def listTables(connection: HConnection): Array[HTableDescriptor] = {
    try {
      connection.listTables()
    } catch {
      case e: Exception =>
        e.printStackTrace()
        Array.empty[HTableDescriptor]
    }
  }

  def get(connection: HConnection, table: HTableInterface, rowkey: String): Result = {
    try {
      table.get(new Get(Bytes.toBytes(rowkey)))
    } catch {
      case e: Exception =>
        e.printStackTrace()
        new Result() // return an empty Result on failure
    }
  }

  def scanWithPrefixFilter(connection: HConnection, table: HTableInterface, filterKeyword: String): ResultScanner = {
    try {
      val scan = new Scan()
      scan.setFilter(new PrefixFilter(Bytes.toBytes(filterKeyword)))
      table.getScanner(scan)
    } catch {
      case e: Exception =>
        e.printStackTrace()
        null // callers must null-check: the scanner could not be opened
    }
  }

  // rowkey : { column family1 { column qualifier1 : value, column qualifier2 : value ... } ... }
  def put(connection: HConnection, table: HTableInterface, rowkey: String, cf: String, cq: String, value: String): Unit = {
    val tableName = table.getName
    try {
      logger.info(s"put data to hbase table: $tableName, key: $rowkey")
      val put = new Put(Bytes.toBytes(rowkey))
      put.add(Bytes.toBytes(cf), Bytes.toBytes(cq), Bytes.toBytes(value))
      table.put(put)
    } catch {
      case e: Exception =>
        log_err.error(s"put error hbase table: $tableName")
        e.printStackTrace()
    }
  }

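  // Example of the layout above (hypothetical names): put(connection, table, "user:123", "f1", "c1", "v")
  // stores { "user:123" : { "f1" : { "c1" : "v" } } }, i.e. one cell at rowkey "user:123"
  // under column family "f1" and qualifier "c1".
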
  // Caution! The schema is changed after the table is disabled. A disabled table cannot be
  // written to, so do not call this function while the service is live.
  def addColumnFamily(connection: HConnection, tableName: String, cfNames: Seq[String]): Unit = {
    try {
      val admin = new HBaseAdmin(PikiHbaseConfig.conf)
      admin.disableTable(tableName)
      cfNames.foreach { cf =>
        admin.addColumn(tableName, new HColumnDescriptor(cf))
      }
      admin.enableTable(tableName) // re-enable so the table accepts writes again
      admin.close()
    } catch {
      case e: Exception =>
        log_err.error(s"addColumnFamily error hbase table: $tableName")
        e.printStackTrace()
    }
  }

  def createTable(tableName: String, cfNames: Seq[String]): Unit = {
    try {
      val admin = new HBaseAdmin(PikiHbaseConfig.conf)
      val tableDescriptor = new HTableDescriptor(TableName.valueOf(tableName))
      // HBase rejects a table with no column family, so add them at creation time.
      cfNames.foreach(cf => tableDescriptor.addFamily(new HColumnDescriptor(cf)))
      admin.createTable(tableDescriptor)
      admin.close()
      logger.info(s"table created $tableName")
    } catch {
      case e: Exception =>
        log_err.error(s"create error hbase table: $tableName")
        e.printStackTrace()
    }
  }

  def dropTable(tableName: String): Unit = {
    try {
      val admin = new HBaseAdmin(PikiHbaseConfig.conf)
      admin.disableTable(tableName)
      admin.deleteTable(tableName)
      admin.close()
      logger.info(s"deleted table $tableName")
    } catch {
      case e: Exception =>
        log_err.error(s"drop error hbase table: $tableName")
        e.printStackTrace()
    }
  }

  def truncateTable(tableName: String): Unit = {
    try {
      val admin = new HBaseAdmin(PikiHbaseConfig.conf)
      // Capture the existing schema first, so the recreated table keeps its column families.
      val tableDescriptor = admin.getTableDescriptor(TableName.valueOf(tableName))
      admin.disableTable(tableName)
      logger.info(s"disabled table $tableName")
      admin.deleteTable(tableName)
      logger.info(s"deleted table $tableName")
      admin.createTable(tableDescriptor)
      logger.info(s"created table $tableName")
      admin.close()
    } catch {
      case e: Exception =>
        log_err.error(s"truncate error hbase table: $tableName")
        e.printStackTrace()
    }
  }

  def deleteRow(connection: HConnection, table: HTableInterface, rowKey: String): Unit = {
    try {
      val delete = new Delete(Bytes.toBytes(rowKey))
      table.delete(delete)
    } catch {
      case e: Exception =>
        log_err.error(s"deleteRow error hbase table: ${table.getName}, rowkey: $rowKey")
        e.printStackTrace()
    }
  }

  def main(args: Array[String]): Unit = {
    val connection = PikiHbase.getConnection
    val table = connection.getTable("mytable")

    // list the tables
    val listtables = PikiHbase.listTables(connection)
    listtables.foreach(println)

    println("put start")
    //PikiHbase.put(connection, table, "key2", "f1", "c1", "hahahahaha")
    println("put end")

    println("get start")
    val result = PikiHbase.get(connection, table, "rowkey1")
    val value = result.value()
    println(if (value == null) "null" else Bytes.toString(value)) // an empty Result yields a null value
    println("get end")

    println("put All start")

    //(1L to 99999999).foreach { x =>
    (1L to 2).foreach { x =>
      val l = s""""SESSION|START|1430677627828|39045405|A2.1.3|samsung-ks01lteskt|Android 4.4.2|2
COMMON|LOAD|1430677630274|39045405|m|{"contents":[{"h":[12399]},{"m":[96732,96114,96132,95832,96516,97710,97632,95400,97188,96378,97644,97662,93144,96612]},{"r1":[21,22,23,44,58,68,103,104,119,125,126,128,133,158,167,169,170,171,172,182,183,184,186,192,197,213,215,521,532,536,540,544,546,552,554,563,567,572,602,605,609,617,618,619,635,651,652,685,697,703,1040,1602,2093,2210,2744,3079,3602,3749,5122,5407,5735,9375,11849,12219,12477,12909,16495,16832,17945,20939,21067,22601,22835,26310,31320,32399,32413,44058,46659,55307,55437,55958,57554,60969,62100,63512,64178,72036,75468,75906,76878,78336,78936,86225,87077,89171,89819,91175,92213,94350]},{"m":[97230,96360,96456,97032,97614,96762]}]}
CONTENT|OPEN|1438306705052|43189353|126805|m|1438306694460|8|TOON
CONTENT|OPEN|1437700722042|${math.abs(Random.nextLong())}|${math.abs(Random.nextLong())}|m
CONTENT|OPEN|1437700722042|${math.abs(Random.nextLong())}|${math.abs(Random.nextLong())}|m
CONTENT|OPEN|1437700722042|${math.abs(Random.nextLong())}|${math.abs(Random.nextLong())}|m
CONTENT|OPEN|1437700722042|${math.abs(Random.nextLong())}|${math.abs(Random.nextLong())}|m
CONTENT|OPEN|1437700722042|${math.abs(Random.nextLong())}|${math.abs(Random.nextLong())}|m
CONTENT|OPEN|1437700722042|${math.abs(Random.nextLong())}|${math.abs(Random.nextLong())}|m
CONTENT|EXPOSURE|1430677630477|39045405|96132|3583|121|m
CONTENT|EXPOSURE|1430677630440|39045405|96114|3620|127|m
CONTENT|EXPOSURE|1430677630407|39045405|96732|3654|143|m
CONTENT|EXPOSURE|1430677630532|39045405|95832|3529|114|m
CONTENT|EXPOSURE|1430677630304|39045405|12399|3757|85|h
CONTENT|OPEN|1437700722042|${math.abs(Random.nextLong())}|${math.abs(Random.nextLong())}|m
CONTENT|OPEN|1437700722042|${math.abs(Random.nextLong())}|${math.abs(Random.nextLong())}|m
CONTENT|OPEN|1437700722042|${math.abs(Random.nextLong())}|${math.abs(Random.nextLong())}|m
CONTENT|OPEN|1437700722042|${math.abs(Random.nextLong())}|${math.abs(Random.nextLong())}|m
CONTENT|OPEN|1437700722042|${math.abs(Random.nextLong())}|${math.abs(Random.nextLong())}|m
CONTENT|OPEN|1437700722042|${math.abs(Random.nextLong())}|${math.abs(Random.nextLong())}|m
CONTENT|OPEN|1437700722042|${math.abs(Random.nextLong())}|${math.abs(Random.nextLong())}|m
CONTENT|OPEN|1437700722042|${math.abs(Random.nextLong())}|${math.abs(Random.nextLong())}|m
CONTENT|OPEN|1437700722042|${math.abs(Random.nextLong())}|${math.abs(Random.nextLong())}|m
CONTENT|OPEN|1437700722042|${math.abs(Random.nextLong())}|${math.abs(Random.nextLong())}|m
CONTENT|OPEN|1437700722042|${math.abs(Random.nextLong())}|${math.abs(Random.nextLong())}|m
CONTENT|OPEN|1437700722042|${math.abs(Random.nextLong())}|${math.abs(Random.nextLong())}|m
CONTENT|OPEN|1437700722042|${math.abs(Random.nextLong())}|${math.abs(Random.nextLong())}|m
CONTENT|OPEN|1437700722042|${math.abs(Random.nextLong())}|${math.abs(Random.nextLong())}|m
CONTENT|OPEN|1437700722042|${math.abs(Random.nextLong())}|${math.abs(Random.nextLong())}|m
CONTENT|OPEN|1437700722042|${math.abs(Random.nextLong())}|${math.abs(Random.nextLong())}|m
CONTENT|OPEN|1437700722042|${math.abs(Random.nextLong())}|${math.abs(Random.nextLong())}|m
CONTENT|OPEN|1437700722042|${math.abs(Random.nextLong())}|${math.abs(Random.nextLong())}|m
CONTENT|OPEN|1437700722042|${math.abs(Random.nextLong())}|${math.abs(Random.nextLong())}|m
CONTENT|OPEN|1437700722042|${math.abs(Random.nextLong())}|${math.abs(Random.nextLong())}|m
CONTENT|OPEN|1437700722042|${math.abs(Random.nextLong())}|${math.abs(Random.nextLong())}|m
CONTENT|OPEN|1437700722042|${math.abs(Random.nextLong())}|${math.abs(Random.nextLong())}|m
CONTENT|OPEN|1437700722042|${math.abs(Random.nextLong())}|${math.abs(Random.nextLong())}|m
CONTENT|OPEN|1437700722042|${math.abs(Random.nextLong())}|${math.abs(Random.nextLong())}|m
CONTENT|OPEN|1437700722042|${math.abs(Random.nextLong())}|${math.abs(Random.nextLong())}|m
CONTENT|OPEN|1437700722042|${math.abs(Random.nextLong())}|${math.abs(Random.nextLong())}|m
CONTENT|OPEN|1437700722042|${math.abs(Random.nextLong())}|${math.abs(Random.nextLong())}|m
CARD|CONSUME|1430677732367|39045405|96132|14|2935
CARD|CONSUME|1430677738495|39045405|96132|15|6127
CARD|CONSUME|1430677743225|39045405|96132|16|4724
CARD|CONSUME|1430677745326|39045405|96132|17|2099
CONTENT|EXPOSURE|1430677752780|39045405|97632|2593|86|m
CONTENT|EXPOSURE|1430677751880|39045405|97710|3493|34|m"""

      val lines = l.split("\n").map(x => x.trim)
      println(1)
      //PikiHbase.putAll(connection, table, "f1", "id1", lines)
    }

    println(Base62.encode(6030146367697000226L))
    println(Base62.encode(6022397113819400614L))
    println(Base62.encode(6929227236222314801L))
    println("put All end")

    val uuid = "6055485486985007966"
    println(Base62.decode("7DKBdnuNl78"))
    //val result2 = PikiHbase.scanWithFilter(connection, table, uuid)
    //val value2 = result2.value()
    //println(Bytes.toString(value2))

    table.close()
    connection.close()
  }
}
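
Since scanWithPrefixFilter returns a live ResultScanner (or null on failure), callers have to null-check the result and close the scanner themselves. A minimal caller sketch, assuming a hypothetical table name and rowkey prefix:

import org.apache.hadoop.hbase.util.Bytes
import com.piki_ds.utils.hbase.PikiHbase

object ScanExample {
  def main(args: Array[String]): Unit = {
    val connection = PikiHbase.getConnection
    val table = connection.getTable("mytable") // hypothetical table name
    val scanner = PikiHbase.scanWithPrefixFilter(connection, table, "6055") // hypothetical prefix
    if (scanner != null) { // the wrapper returns null when the scan could not be opened
      try {
        var r = scanner.next()
        while (r != null) {
          println(Bytes.toString(r.getRow)) // print each matching rowkey
          r = scanner.next()
        }
      } finally {
        scanner.close() // scanners hold server-side resources until closed
      }
    }
    table.close()
    connection.close()
  }
}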
app/com/piki_ds/utils/hbase/PikiHbaseConfig.scala
@@ -0,0 +1,39 @@
package com.piki_ds.utils.hbase

import org.apache.commons.io.IOUtils
import org.apache.hadoop.hbase.HBaseConfiguration
import org.apache.hadoop.fs.Path

object PikiHbaseConfig {
  // resource path
  val resourceBase = "/etc/hbase"
  val hbaseSite = s"$resourceBase/hbase-site.xml"
  val conf = HBaseConfiguration.create()
  val hbaseSitePath = copyResourceFileToTmp(hbaseSite)
  conf.addResource(new Path(hbaseSitePath.getPath))

  /**
   * Reads a file from the classpath resources and writes it to a temporary file.
   * @param resourcePath path of the resource file
   * @return the File object of the saved copy
   */
  private def copyResourceFileToTmp(resourcePath: String): java.io.File = {
    val inResource: java.io.InputStream = getClass.getResourceAsStream(resourcePath)
    require(inResource != null, s"resource not found on classpath: $resourcePath")
    val (fileName, fileExt) = {
      val fileNameExt = resourcePath.substring(resourcePath.lastIndexOf("/") + 1)
      val fileName = fileNameExt.substring(0, fileNameExt.lastIndexOf("."))
      val fileExt = fileNameExt.substring(fileNameExt.lastIndexOf("."))
      (fileName, fileExt)
    }

    val tempdir = System.getProperty("java.io.tmpdir")

    val tmpFile: java.io.File = java.io.File.createTempFile(fileName + "_", fileExt, new java.io.File(tempdir))
    val outResource = new java.io.FileOutputStream(tmpFile)
    IOUtils.copy(inResource, outResource)
    inResource.close()
    outResource.close()
    tmpFile
  }
}
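
In a Play application, files under conf/ sit on the runtime classpath, so the resource path /etc/hbase/hbase-site.xml resolves to conf/etc/hbase/hbase-site.xml from this commit. A quick sanity-check sketch, assuming that classpath layout:

import com.piki_ds.utils.hbase.PikiHbaseConfig

object ConfigCheck {
  def main(args: Array[String]): Unit = {
    // Touching PikiHbaseConfig.conf triggers its initialization: hbase-site.xml is
    // copied out of the classpath into a temp file and added as a Configuration resource.
    val quorum = PikiHbaseConfig.conf.get("hbase.zookeeper.quorum")
    println(s"zookeeper quorum: $quorum") // expected: kr-data-h2,kr-data-h3,kr-data-h4
  }
}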
build.sbt
@@ -0,0 +1,118 @@
import sbt.Keys._
import sbt._
//import com.github.play2war.plugin._

lazy val root = (project in file(".")).enablePlugins(PlayScala)

name := "dsutils_hbase"

version := "0.1.0-SNAPSHOT"

organization := "com.piki_ds"

scalaVersion := "2.11.6"

aggregate in runMain := true

val sprayV = "1.3.3"

val sparkV = "1.3.1"

//Play2WarKeys.servletVersion := "3.0"

//Play2WarPlugin.play2WarSettings

//net.virtualvoid.sbt.graph.Plugin.graphSettings

val hadoopversion = "2.6.0"

libraryDependencies ++= Seq(
  jdbc,
  anorm,
  cache,
  ws,
  "org.apache.hadoop" % "hadoop-hdfs" % hadoopversion exclude("commons-daemon", "commons-daemon"),
  "org.apache.hadoop" % "hadoop-auth" % hadoopversion exclude("commons-daemon", "commons-daemon"),
  "org.apache.hadoop" % "hadoop-client" % hadoopversion exclude("commons-daemon", "commons-daemon"),
  "org.slf4j" % "slf4j-api" % "1.7.2",
  "com.typesafe.akka" %% "akka-actor" % "2.3.9",
  "com.typesafe.akka" %% "akka-remote" % "2.3.9",
  "com.typesafe.akka" %% "akka-slf4j" % "2.3.9",
  "io.spray" %% "spray-can" % sprayV,
  "io.spray" %% "spray-caching" % sprayV,
  "io.spray" %% "spray-routing" % sprayV,
  "io.spray" %% "spray-client" % sprayV,
  "com.google.code.findbugs" % "jsr305" % "2.0.3",
  "com.mchange" % "c3p0" % "0.9.5",
  "com.google.guava" % "guava" % "18.0",
  "mysql" % "mysql-connector-java" % "5.1.32",
  "org.json4s" %% "json4s-jackson" % "3.2.11",
  "org.json4s" %% "json4s-ext" % "3.2.11",
  "org.json4s" %% "json4s-native" % "3.2.11",
  "org.mariadb.jdbc" % "mariadb-java-client" % "1.1.8",
  "com.netaporter" %% "scala-uri" % "0.4.7",
  "org.feijoas" %% "mango" % "0.11",
  "com.twitter.penguin" % "korean-text" % "4.1.3piki-SNAPSHOT",
  "com.piki_ds" %% "dsactors" % "0.1.0-SNAPSHOT",
  "com.piki_ds" %% "dspikirepository" % "0.1.0-SNAPSHOT",
  "com.twitter" %% "util-collection" % "6.23.0",
  "org.scalatest" %% "scalatest" % "2.1.6" % "test",
  "org.apache.spark" %% "spark-core" % sparkV,
  "org.apache.spark" %% "spark-streaming" % sparkV,
  "org.apache.spark" %% "spark-sql" % sparkV,
  "org.apache.spark" %% "spark-mllib" % sparkV,
  "org.apache.spark" %% "spark-repl" % sparkV,
  "org.apache.spark" %% "spark-yarn" % sparkV,
  "com.typesafe.akka" %% "akka-testkit" % "2.3.4" % "test"
)

resolvers ++= Seq(
  "Typesafe repository" at "http://repo.typesafe.com/typesafe/releases/",
  "Sonatype Releases" at "https://oss.sonatype.org/content/repositories/releases/",
  "Scala Tools Snapshots" at "http://scala-tools.org/repo-snapshots/",
  "twitter" at "http://maven.twttr.com/",
  "spray repo" at "http://repo.spray.io/"
)

resolvers += "Local Maven Repository" at "file://" + Path.userHome.absolutePath + "/.m2/repository"

javacOptions ++= Seq("-source", "1.7", "-target", "1.7")

retrieveManaged := true

//publishTo := Some("daum" at "http://maven.daumcorp.com/content/groups/daum-sqt-group/")

publishMavenStyle := true

//publishTo := Some("daum snapshot" at "http://maven.daumcorp.com/content/repositories/daum-sqt-snapshots")

// Strip slf4j artifacts pulled in transitively by the dependencies above.
libraryDependencies ~= { _.map(_.excludeAll(
  ExclusionRule("org.slf4j", "slf4j*")
))}

pomExtra := (
  // <distributionManagement>
  //   <repository>
  //     <id>daum</id>
  //     <name>Daum Repository</name>
  //     <url>http://maven.daumcorp.com/content/repositories/daum</url>
  //   </repository>
  //   <snapshotRepository>
  //     <id>daum-snapshots</id>
  //     <name>Daum Snapshot Repository</name>
  //     <url>http://maven.daumcorp.com/content/repositories/daum-snapshots</url>
  //   </snapshotRepository>
  // </distributionManagement>
  <scm>
    <url>http://digit.daumcorp.com/badend/arfapi</url>
    <connection>scm:git:git@dgit.co:badend/arfapi.git</connection>
  </scm>
  <developers>
    <developer>
      <id>badend</id>
      <name>badend</name>
    </developer>
  </developers>)
conf/etc/hbase/hbase-site.xml
@@ -0,0 +1,23 @@
<?xml version="1.0"?>
<?xml-stylesheet type="text/xsl" href="configuration.xsl"?>
<configuration>
  <property>
    <name>hbase.cluster.distributed</name>
    <value>true</value>
  </property>
  <property>
    <name>hbase.rootdir</name>
    <value>hdfs://KR-DATA-H1:9000/hbase</value>
  </property>
  <property>
    <name>hbase.zookeeper.quorum</name>
    <value>kr-data-h2,kr-data-h3,kr-data-h4</value>
  </property>
  <property>
    <name>hbase.zookeeper.property.dataDir</name>
    <value>/export/zookeeper</value>
    <description>Property from ZooKeeper's config zoo.cfg.
      The directory where the snapshot is stored.
    </description>
  </property>
</configuration>
project/plugins.sbt
@@ -0,0 +1,20 @@
logLevel := Level.Debug

resolvers += "Typesafe repository" at "https://repo.typesafe.com/typesafe/releases/"

// The Play plugin
addSbtPlugin("com.typesafe.play" % "sbt-plugin" % "2.3.8")

// web plugins

addSbtPlugin("com.typesafe.sbt" % "sbt-coffeescript" % "1.0.0")

addSbtPlugin("com.typesafe.sbt" % "sbt-less" % "1.0.0")

addSbtPlugin("com.typesafe.sbt" % "sbt-jshint" % "1.0.1")

addSbtPlugin("com.typesafe.sbt" % "sbt-rjs" % "1.0.1")

addSbtPlugin("com.typesafe.sbt" % "sbt-digest" % "1.0.0")

addSbtPlugin("com.typesafe.sbt" % "sbt-mocha" % "1.0.0")