Commit 858cabbac54c347d1ac12a0408fd75a54907a3dc

Authored by joel
1 parent 1b1fe5c7a3
Exists in master

initial setting

Showing 2 changed files with 94 additions and 12 deletions Side-by-side Diff

app/com/piki_ds/utils/hbase/Base62.scala View file @ 858cabb
  1 +package com.piki_ds.utils.hbase
  2 +
  3 +import java.util.Date
  4 +
  5 +/**
  6 + * Created by jihoonkang on 5/15/15.
  7 + */
  8 +
  9 +
/** Convenience facade over a shared default [[Base62]] codec instance. */
object Base62 {

  // Single shared codec built on the default alphabet (0-9, A-Z, a-z).
  val base62 = new Base62()

  /** Decodes a base-62 string back into its numeric value. */
  def decode(s: String) = base62.decode(s)

  /** Encodes a number as a base-62 string. */
  def encode(i: Long) = base62.encode(i)

  /** Ad-hoc manual check: prints the timestamp encoded in a sample key. */
  def main(args: Array[String]) = {
    //println(Base62.encode(99642))
    println(new Date(Base62.decode("P8to8Y4")))
  }
}
  25 +
/**
 * Base-62 codec over a configurable 62-character alphabet.
 *
 * A character's position in `baseString` defines its digit value, so the
 * default alphabet encodes 0-9 first, then A-Z, then a-z.
 *
 * @param baseString the alphabet; must be exactly 62 characters long.
 * @throws IllegalArgumentException if the alphabet is not 62 characters.
 */
class Base62(baseString: String = ((0 to 9) ++ ('A' to 'Z') ++ ('a' to 'z')).mkString) {

  val base = 62

  // Loan-pattern helper. Structural type => reflective call at runtime;
  // kept for source compatibility, but a dedicated Closeable-like trait
  // would be preferable.
  def using[A, R <: { def close() }](r: R)(f: R => A): A =
    try { f(r) } finally { r.close() }

  if (baseString.size != base) {
    throw new IllegalArgumentException("baseString length must be %d".format(base))
  }

  /**
   * Decodes a base-62 string into a Long using Horner's rule.
   *
   * Pure integer arithmetic replaces the previous `scala.math.pow`-based
   * implementation, which went through Double and silently lost precision
   * for values above 2^53.
   *
   * @param s the encoded string
   * @return the decoded value
   * @throws IllegalArgumentException if `s` contains a character that is
   *                                  not in the alphabet (previously such
   *                                  characters silently produced garbage).
   */
  def decode(s: String): Long =
    s.foldLeft(0L) { (acc, c) =>
      val digit = baseString.indexOf(c)
      if (digit < 0) {
        throw new IllegalArgumentException(s"illegal base62 character: '$c'")
      }
      acc * base + digit
    }

  /**
   * Encodes a non-negative Long as a base-62 string.
   *
   * @param i the value to encode; must be >= 0
   * @return the base-62 representation (e.g. 0 encodes to "0")
   * @throws IllegalArgumentException if `i` is negative (previously this
   *                                  surfaced as an index-out-of-bounds).
   */
  def encode(i: Long): String = {
    require(i >= 0, "cannot encode a negative value")

    // Accumulate digits most-significant-first via repeated division.
    @scala.annotation.tailrec
    def div(i: Long, res: List[Int] = Nil): List[Int] =
      (i / base) match {
        case q if q > 0 => div(q, (i % base).toInt :: res)
        case _          => i.toInt :: res
      }

    div(i).map(baseString(_)).mkString
  }

}
app/com/piki_ds/utils/hbase/PikiHbase.scala View file @ 858cabb
1 1 package com.piki_ds.utils.hbase
2 2  
  3 +import java.text.SimpleDateFormat
  4 +import java.util.Date
  5 +
  6 +import com.piki_ds.utils.hbase.Base62
3 7 import org.apache.hadoop.hbase.{HColumnDescriptor, TableName, HTableDescriptor}
4 8 import org.apache.hadoop.hbase.client._
5 9 import org.apache.hadoop.hbase.filter.{PrefixFilter}
... ... @@ -29,7 +33,7 @@
29 33 }
30 34 }
31 35  
32   - def get(connection: HConnection, table: HTableInterface, rowkey: String): Result = {
  36 + def get(table: HTableInterface, rowkey: String): Result = {
33 37 var result = new Result()
34 38 try {
35 39 val getObj= new Get(Bytes.toBytes(rowkey))
... ... @@ -43,7 +47,7 @@
43 47 result
44 48 }
45 49  
46   - def scanWithPrefixFilter(connection: HConnection, table: HTableInterface, filterKeyword: String): ResultScanner = {
  50 + def scanWithPrefixFilter(table: HTableInterface, filterKeyword: String): ResultScanner = {
47 51 try {
48 52  
49 53 val scan = new Scan()
50 54  
... ... @@ -59,8 +63,23 @@
59 63 }
60 64 }
61 65  
  66 +
  /**
   * Runs a bounded row scan over `table` from `startrow` to `stoprow`.
   *
   * NOTE(review): in the HBase client the start row is inclusive and the
   * stop row is exclusive — confirm callers build `stoprow` accordingly.
   *
   * Returns a live ResultScanner on success; the CALLER owns it and must
   * close it. On any failure the stack trace is printed and `null` is
   * returned, so callers must null-check before iterating (the visible
   * caller in this commit does not — NPE risk).
   */
  def scan(table: HTableInterface, startrow: Array[Byte], stoprow: Array[Byte]) = {
    try {
      val scan = new Scan
      scan.setStartRow(startrow)
      scan.setStopRow(stoprow)
      table.getScanner(scan)
    } catch {
      // Broad catch swallows the error; consider propagating or returning
      // an Option instead of null.
      case e: Exception => {
        e.printStackTrace()
        null
      }
    }
  }
  80 +
62 81 //rowkey : { column family1 { column qualifier1 : value, column qualifier2 : value ... }...}
63   - def put(connection: HConnection, table: HTableInterface, rowkey: String, cf: String, cq: String, value: String): Unit = {
  82 + def put(table: HTableInterface, rowkey: String, cf: String, cq: String, value: String): Unit = {
64 83 val tableName = table.getName
65 84 try {
66 85  
... ... @@ -79,7 +98,7 @@
79 98 }
80 99  
81 100 //주의! table이 disable된 후, 스키마 변경됨. disable시키면 테이블 write가 되지 않으므로 서비스중에는 이 함수를 쓰지 않도록 한다.
82   - def addColumnFamily(connection: HConnection, tableName: String, cfNames: Seq[String]): Unit = {
  101 + def addColumnFamily(tableName: String, cfNames: Seq[String]): Unit = {
83 102 try {
84 103 val admin =new HBaseAdmin(PikiHbaseConfig.conf)
85 104 val tableDescriptor = new HTableDescriptor(TableName.valueOf(tableName))
... ... @@ -143,7 +162,7 @@
143 162 }
144 163 }
145 164  
146   - def deleteRow(connection: HConnection, table: HTableInterface, rowKey: String) = {
  165 + def deleteRow(table: HTableInterface, rowKey: String) = {
147 166 try {
148 167 val delete = new Delete(Bytes.toBytes(rowKey))
149 168 table.delete(delete)
... ... @@ -168,9 +187,9 @@
168 187 println("put end")
169 188  
170 189 println("get start")
171   - val result = PikiHbase.get(connection, table, "rowkey1")
172   - val value = result.value()
173   - println(Bytes.toString(value))
  190 + val result = PikiHbase.get(table, "rowkey1")
  191 + val value = result.getValue("f1".getBytes, "c1".getBytes)
  192 + //println(Bytes.toString(value))
174 193 println("get end")
175 194  
176 195 println("put All start")
177 196  
... ... @@ -196,11 +215,18 @@
196 215  
197 216 println("put All end")
198 217  
199   - val uuid = "6055485486985007966"
200   - //val result2 = PikiHbase.scanWithFilter(connection, table, uuid)
  218 + val sdf = new SimpleDateFormat("dd-M-yyyy hh:mm:ss")
  219 + val start = "31-07-2015 00:00:00"
  220 + val end = "31-07-2015 23:59:59"
201 221  
202   - //val value2 = result2.value()
203   - //println(Bytes.toString(value2))
  222 + val uuid = "VqA720"
  223 + val scanner = scan(table, s"$uuid,${Base62.encode(sdf.parse(start).getTime())}".getBytes(), s"$uuid,${Base62.encode(sdf.parse(end).getTime())}".getBytes)
  224 + val iterator = scanner.iterator()
  225 +
  226 + while (iterator.hasNext) {
  227 + println(Bytes.toString(iterator.next().getRow))
  228 + }
  229 + scanner.close
204 230  
205 231 table.close()
206 232 connection.close()