Sunday 19 February 2017

Phoenix Kafka Integration {Kalyan Contribution to Apache}



Hi All,

Phoenix Kafka is a new integration. You can follow the link below:

http://phoenix.apache.org/kafka.html


This is a new feature that is in demand these days:

pushing real-time streaming data into an OLTP system like Phoenix.


You can see similar use cases in the links below from my blog.

Flume Real Time Projects




Kafka Real Time Projects




How to do `Production Level Tracking` of any issues, as shown below




Please share this with your friends & colleagues to learn more about these kinds of use cases.



SPARK BASICS Practice on 05 Feb 2017

Spark:
--------------------
`Spark Context` => Entry point for Spark Operations

`RDD` => Resilient Distributed Datasets

`RDD` features:
----------------------
1. Immutability
2. Lazy Evaluation
3. Cacheable
4. Type Inference
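
A minimal sketch of these four features in action (assuming a running spark-shell, so `sc` is available):

val rdd = sc.parallelize(List(1, 2, 3, 4))   // Type Inference: RDD[Int] is inferred from the data

val doubled = rdd.map(x => x * 2)   // Immutability: map returns a new RDD; rdd itself is unchanged
doubled.cache()                     // Cacheable: keep the computed data in memory after first use
doubled.count()                     // Lazy Evaluation: nothing runs until this action is called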

`RDD` operations:
----------------------
1. Transformations

(input is RDD) => (output is RDD)


2. Actions

(input is RDD) => (output is Result)


Examples on RDD:
-----------------------
list <- {1,2,3,4}

Transformations:
----------------------
f(x) <- {x + 1}
f(list) <- {2,3,4,5}

f(x) <- {x * x}
f(list) <- {1,4,9,16}

Actions:
----------------------
sum(list) <- 10
min(list) <- 1
max(list) <- 4

Spark Libraries:
------------------
1. Spark SQL
2. Spark Streaming
3. Spark MLLib
4. Spark GraphX


Command Line approach:
---------------------------
Scala => spark-shell
Python => pyspark
R   => sparkR

Spark context available as sc
SQL context available as sqlContext


How to Create RDD from Spark Context:
----------------------------------------
We can create an RDD from the Spark Context using:
1. From Collections (List, Seq, Set, ..)
2. From Data Sets (text, csv, tsv, ...)


Creating RDD from Collections:
-------------------------------
val list = List(1,2,3,4)
val rdd = sc.parallelize(list)

-------------------------------------------

scala> val list = List(1,2,3,4)
list: List[Int] = List(1, 2, 3, 4)

scala> val rdd = sc.parallelize(list)
rdd: org.apache.spark.rdd.RDD[Int] = ParallelCollectionRDD[0] at parallelize at <console>:29

scala> rdd.getNumPartitions
res1: Int = 4

scala> val rdd = sc.parallelize(list, 2)
rdd: org.apache.spark.rdd.RDD[Int] = ParallelCollectionRDD[1] at parallelize at <console>:29

scala> rdd.getNumPartitions
res2: Int = 2

-------------------------------------------

scala> rdd.collect()
res3: Array[Int] = Array(1, 2, 3, 4)

scala> rdd.collect().foreach(println)
1
2
3
4

scala> rdd.foreach(println)
3
4
1
2

scala> rdd.foreach(println)
1
2
3
4

-------------------------------------------

scala> rdd.collect
res10: Array[Int] = Array(1, 2, 3, 4)

scala> rdd.map(x => x + 1)
res11: org.apache.spark.rdd.RDD[Int] = MapPartitionsRDD[2] at map at <console>:32

scala> rdd.map(x => x + 1).collect
res12: Array[Int] = Array(2, 3, 4, 5)

scala> rdd.map(x => x * x).collect
res13: Array[Int] = Array(1, 4, 9, 16)


-------------------------------------------

scala> rdd.collect
res14: Array[Int] = Array(1, 2, 3, 4)

scala> rdd.sum
res15: Double = 10.0

scala> rdd.min
res16: Int = 1

scala> rdd.max
res17: Int = 4

scala> rdd.count
res18: Long = 4

-------------------------------------------

Creating RDD from DataSets:
-------------------------------
val file = "file:///home/orienit/work/input/demoinput"

val rdd = sc.textFile(file)

-------------------------------------------

scala> val file = "file:///home/orienit/work/input/demoinput"
file: String = file:///home/orienit/work/input/demoinput

scala> val rdd = sc.textFile(file)
rdd: org.apache.spark.rdd.RDD[String] = MapPartitionsRDD[7] at textFile at <console>:29

scala> rdd.getNumPartitions
res19: Int = 2

scala> val rdd = sc.textFile(file, 1)
rdd: org.apache.spark.rdd.RDD[String] = MapPartitionsRDD[9] at textFile at <console>:29

scala> rdd.getNumPartitions
res20: Int = 1

-------------------------------------------
Word Count Job in Spark
-------------------------------------------
val file = "file:///home/orienit/work/input/demoinput"

val output = "file:///home/orienit/work/output/demooutput"

// creating an RDD from a text file
val rdd = sc.textFile(file)

// get all the words from each line
val words = rdd.flatMap(line => line.split(" "))

// create tuple for each word
val tuple = words.map(word => (word, 1))

// final word & count from tuples
val wordcount = tuple.reduceByKey((a,b) => a + b)

// sort the data based on word
val sorted = wordcount.sortBy(word => word)

// save the data into output folder
sorted.saveAsTextFile(output)


-------------------------------------------

scala> rdd.collect
res21: Array[String] = Array(I am going, to hyd, I am learning, hadoop course)

scala> rdd.flatMap(line => line.split(" ")).collect
res22: Array[String] = Array(I, am, going, to, hyd, I, am, learning, hadoop, course)

scala> rdd.flatMap(line => line.split(" ")).map(word => (word, 1)).collect
res23: Array[(String, Int)] = Array((I,1), (am,1), (going,1), (to,1), (hyd,1), (I,1), (am,1), (learning,1), (hadoop,1), (course,1))

scala> rdd.flatMap(line => line.split(" ")).map(word => (word, 1)).reduceByKey((a,b) => a + b).collect
res24: Array[(String, Int)] = Array((learning,1), (hadoop,1), (am,2), (hyd,1), (I,2), (to,1), (going,1), (course,1))

scala> rdd.flatMap(line => line.split(" ")).map(word => (word, 1)).reduceByKey((a,b) => a + b).sortBy(word => word)
res25: org.apache.spark.rdd.RDD[(String, Int)] = MapPartitionsRDD[21] at sortBy at <console>:32

scala> rdd.flatMap(line => line.split(" ")).map(word => (word, 1)).reduceByKey((a,b) => a + b).sortBy(word => word).collect
res26: Array[(String, Int)] = Array((I,2), (am,2), (course,1), (going,1), (hadoop,1), (hyd,1), (learning,1), (to,1))


scala> rdd.flatMap(line => line.split(" ")).map(word => (word, 1)).reduceByKey((a,b) => a + b).sortBy(word => word).collect.foreach(println)
(I,2)
(am,2)
(course,1)
(going,1)
(hadoop,1)
(hyd,1)
(learning,1)
(to,1)

-------------------------------------------


-------------------------------------------
Grep Job in Spark
-------------------------------------------
val file = "file:///home/orienit/work/input/demoinput"

val output = "file:///home/orienit/work/output/demooutput-1"

// creating an RDD from a text file
val rdd = sc.textFile(file)

// filter the data based on pattern
val filterData = rdd.filter(line => line.contains("am"))

// save the data into output folder
filterData.saveAsTextFile(output)
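
To spot-check the result, you could read the output folder back (a quick sketch):

val check = sc.textFile(output)
check.collect.foreach(println)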

-------------------------------------------

val df1 = sqlContext.sql("SELECT * from kalyan.src")

val df2 = sqlContext.sql("SELECT * from student")

df1.show

df2.show

-------------------------------------------

df1.createOrReplaceTempView("tbl1")

df2.createOrReplaceTempView("tbl2")


sqlContext.sql("SELECT * from tbl1").show()

sqlContext.sql("SELECT * from tbl2").show()

sqlContext.sql("SELECT tbl1.*, tbl2.* from tbl1 join tbl2 on tbl1.key = tbl2.id").show()


-------------------------------------------

Saturday 4 February 2017

SCALA BASICS Practice on 04 Feb 2017

Scala (Scalable Language)
---------------------------------
1. Object-Oriented + Functional Programming Language
2. In Scala, everything is an Object

Java:
----------------
"String" is "immutable"
"StringBuffer & StringBuilder" is "mutable"

"immutable" => we can't change the data
"mutable" => we can change the data


Scala:
-------------------------
val => "value" is "immutable"
var => "variable" is "mutable"


Java Syntax:
----------------------
<data_type> <variable_name> = <data> ;


Scala Syntax:
----------------------
<variable_name> : <data_type> = <data>

val <variable_name> [: <data_type>] = <data>
var <variable_name> [: <data_type>] = <data>




orienit@kalyan:~$ scala
Welcome to Scala 2.11.8 (OpenJDK 64-Bit Server VM, Java 1.8.0_66-internal).
Type in expressions for evaluation. Or try :help.

scala> 



"Scala" Provides 'REPL' promt
R => Read
E => Evaluate
P => Print
L => Loop


"Scala" providing similar to "Python & R" -> "REPL" Promt

"Scala" provides "Type Infer" feature.
Based on the "data" it automatically find the "data type"

scala> val a = 1
a: Int = 1

scala> val a = 1.5
a: Double = 1.5

scala> val a = 1.5f
a: Float = 1.5

scala> val a = 1l
a: Long = 1

scala> val a = 1d
a: Double = 1.0

scala> val a = '1'
a: Char = 1

scala> val a = "1"
a: String = 1

------------------------------------------------

scala> val a = 1
a: Int = 1

scala> val a : Int = 1
a: Int = 1

scala> val a : Double = 1
a: Double = 1.0

scala> val a : Long = 1
a: Long = 1

scala> val a : Char = 1
a: Char = ?

scala> val a : String = 1
<console>:11: error: type mismatch;
 found   : Int(1)
 required: String
       val a : String = 1
                        ^
------------------------------------------------

scala> val a = 1
a: Int = 1

scala> a.toDouble
res0: Double = 1.0

scala> a.toLong
res1: Long = 1

scala> a.toChar
res2: Char = ?

scala> a.toFloat
res3: Float = 1.0

scala> a.toString
res4: String = 1

------------------------------------------------
"Scala" provides "Operator Overloading" similar to "C++"

scala> val a = 1
a: Int = 1

scala> val b = 2
b: Int = 2

scala> val c = a + b
c: Int = 3

scala> val c = a.+(b)
c: Int = 3


a + b <====>  a.+(b)

a - b <====>  a.-(b)

a * b <====>  a.*(b)
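
Since these operators are just methods with symbolic names, the same mechanism works for user-defined types. A minimal sketch (the Point class is illustrative):

class Point(val x: Int, val y: Int) {
  def +(other: Point) = new Point(x + other.x, y + other.y)
}

val p = new Point(1, 2) + new Point(3, 4)
// p.x = 4, p.y = 6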

------------------------------------------------

scala> val a = 1
a: Int = 1

scala> a = 2
<console>:12: error: reassignment to val
       a = 2
         ^

scala> var a = 1
a: Int = 1

scala> a = 2
a: Int = 2


------------------------------------------------
IF-ELSE
-------------
if(exp) {
  body
}

if(exp) {
  body1
} else {
  body2
}


if(exp1) {
  body1
} else if(exp2) {
  body2
} else {
  body3
}
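
For example, a quick sketch of the three forms (the marks value is illustrative):

val marks = 75

if (marks >= 90) {
  println("grade A")
} else if (marks >= 60) {
  println("grade B")
} else {
  println("grade C")
}
// prints: grade B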

------------------------------------------------
Java:
----------
int[] nums = {1, 2, 3, 4, 5};

int[] nums = new int[5];


Scala:
----------
val nums  = Array(1, 2, 3, 4, 5)

val nums  = Array[Int](1, 2, 3, 4, 5)

val nums : Array[Int] = Array[Int](1, 2, 3, 4, 5)


val nums  = Array[Int](10)

val nums  = new Array[Int](10)

------------------------------------------------

scala> val nums  = Array[Int](10)
nums: Array[Int] = Array(10)

scala> val nums  = new Array[Int](10)
nums: Array[Int] = Array(0, 0, 0, 0, 0, 0, 0, 0, 0, 0)

scala> val nums  = Array[Int](1, 2, 3, 4, 5)
nums: Array[Int] = Array(1, 2, 3, 4, 5)

------------------------------------------------

val names = Array[String]("anil", "kalyan", "raj", "venkat")

// names[0]

names(0)

------------------------------------------------
scala> val names = Array[String]("anil", "kalyan", "raj", "venkat")
names: Array[String] = Array(anil, kalyan, raj, venkat)

scala> names[0]
<console>:1: error: identifier expected but integer literal found.
names[0]
      ^

scala> names(0)
res5: String = anil

scala> names(0) = "anil kumar"

scala> names
res7: Array[String] = Array(anil kumar, kalyan, raj, venkat)

scala> names = 1
<console>:12: error: reassignment to val
       names = 1
             ^
------------------------------------------------
"Scala" providing 2 types of "collections"
1. immutable collections => scala.collection.immutable
2. mutable collections   => scala.collection.mutable
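
A minimal sketch of the difference, using List and ArrayBuffer as examples:

import scala.collection.mutable

// immutable: operations return a new collection
val xs = List(1, 2, 3)
val ys = xs :+ 4          // xs is unchanged; ys = List(1, 2, 3, 4)

// mutable: operations modify the collection in place
val buf = mutable.ArrayBuffer(1, 2, 3)
buf += 4                  // buf is now ArrayBuffer(1, 2, 3, 4)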

------------------------------------------------

scala> 1 to 10
res8: scala.collection.immutable.Range.Inclusive = Range(1, 2, 3, 4, 5, 6, 7, 8, 9, 10)

scala> 1 to 10 by 2
res9: scala.collection.immutable.Range = Range(1, 3, 5, 7, 9)

scala> 1 to 10 by 3
res10: scala.collection.immutable.Range = Range(1, 4, 7, 10)

scala> 1 until 10
res11: scala.collection.immutable.Range = Range(1, 2, 3, 4, 5, 6, 7, 8, 9)

scala> 1 until 10 by 2
res12: scala.collection.immutable.Range = Range(1, 3, 5, 7, 9)

scala> 1 until 10 by 3
res13: scala.collection.immutable.Range = Range(1, 4, 7)


------------------------------------------------

<start number> (to | until) <end number> [by <step number>]

------------------------------------------------
// wrong syntax
for( 1 to 10 ) println(num)

// correct syntax
for( num <- 1 to 10 ) println(num)


------------------------------------------------

scala> for( 1 to 10 ) println(num)
<console>:1: error: '<-' expected but ')' found.
for( 1 to 10 ) println(num)
             ^

scala> for( num <- 1 to 10 ) println(num)
1
2
3
4
5
6
7
8
9
10


for( num <- 1 to 10 by  2) println(num)

for( num <- 1 to 10 ) if(num % 2 == 1) println(num)

for( num <- 1 to 10 if(num % 2 == 1) ) println(num)

------------------------------------------------

scala> for( num <- 1 to 10 by  2) println(num)
1
3
5
7
9

scala> for( num <- 1 to 10 ) if(num % 2 == 1) println(num)
1
3
5
7
9

scala> for( num <- 1 to 10 if(num % 2 == 1) ) println(num)
1
3
5
7
9


------------------------------------------------

for( num <- 1 to 10 ) if(num % 2 == 1) println("num is odd : " + num) else println("num is even : " + num)

scala> for( num <- 1 to 10 ) if(num % 2 == 1) println("num is odd : " + num) else println("num is even : " + num)
num is odd : 1
num is even : 2
num is odd : 3
num is even : 4
num is odd : 5
num is even : 6
num is odd : 7
num is even : 8
num is odd : 9
num is even : 10


------------------------------------------------
String Interpolation:
---------------------------
val id = 1
val name = "kalyan"
val course = "spark"
val percentage = 90.5

val exp1 = "name is kalyan, course is spark"
println(exp1)

val exp2 = "name is " + name + ", course is " + course
println(exp2)

val exp3 = "name is $name, course is $course"
println(exp3)


val exp4 = s"name is $name, course is $course"
println(exp4)


val exp5 = s"name is $name, percentage is $percentage"
println(exp5)


val exp6 = s"name is $name, percentage is $percentage%.2f"
println(exp6)


val exp7 = f"name is $name, percentage is $percentage%.2f"
println(exp7)


val exp8 = s"name is $name\ncourse is $course"
println(exp8)

val exp9 = raw"name is $name\ncourse is $course"
println(exp9)


------------------------------------------------

scala> val id = 1
id: Int = 1

scala> val name = "kalyan"
name: String = kalyan

scala> val course = "spark"
course: String = spark

scala> val percentage = 90.5
percentage: Double = 90.5

scala> val exp1 = "name is kalyan, course is spark"
exp1: String = name is kalyan, course is spark

scala> println(exp1)
name is kalyan, course is spark


scala> val exp2 = "name is " + name + ", course is " + course
exp2: String = name is kalyan, course is spark

scala> println(exp2)
name is kalyan, course is spark



scala> val exp3 = "name is $name, course is $course"
exp3: String = name is $name, course is $course

scala> println(exp3)
name is $name, course is $course

scala> val exp4 = s"name is $name, course is $course"
exp4: String = name is kalyan, course is spark

scala> println(exp4)
name is kalyan, course is spark


scala> val exp5 = s"name is $name, percentage is $percentage"
exp5: String = name is kalyan, percentage is 90.5

scala> println(exp5)
name is kalyan, percentage is 90.5

scala> val exp6 = s"name is $name, percentage is $percentage%.2f"
exp6: String = name is kalyan, percentage is 90.5%.2f

scala> println(exp6)
name is kalyan, percentage is 90.5%.2f

scala> val exp7 = f"name is $name, percentage is $percentage%.2f"
exp7: String = name is kalyan, percentage is 90.50

scala> println(exp7)
name is kalyan, percentage is 90.50


scala> val exp8 = s"name is $name\ncourse is $course"
exp8: String =
name is kalyan
course is spark

scala> println(exp8)
name is kalyan
course is spark

scala> val exp9 = raw"name is $name\ncourse is $course"
exp9: String = name is kalyan\ncourse is spark

scala> println(exp9)
name is kalyan\ncourse is spark

------------------------------------------------
Functional Programming in Scala:
----------------------------------------
1. Named Functions
2. Anonymous Functions
3. Curried Functions

------------------------------------------------

Anonymous Functions:
-----------------------
(x : Int) => { x + 1 }

val add = (x : Int) => { x + 1 }

add(1)

add(10)

------------------------------------------------
scala> (x : Int) => { x + 1 }
res28: Int => Int = <function1>

scala> val add = (x : Int) => { x + 1 }
add: Int => Int = <function1>

scala> add(1)
res29: Int = 2

scala> add(10)
res30: Int = 11


------------------------------------------------
Named Functions:
-----------------------

val add = (x : Int) => { x + 1 }

def add(x : Int) = { x + 1 }

scala> def add(x : Int) = { x + 1 }
add: (x: Int)Int

scala> add(1)
res31: Int = 2

scala> add(10)
res32: Int = 11


------------------------------------------------
Curried Functions:
-----------------------
def add(x : Int, y : Int) = { x + y }

def add(x : Int)(y : Int) = { x + y }


scala> def add(x : Int)(y : Int) = { x + y }
add: (x: Int)(y: Int)Int

scala> add(1)(2)
res33: Int = 3

scala> add(10)(20)
res34: Int = 30

------------------------------------------------

def addOne(z : Int) = add(z)(1)   // named wrapper over the curried "add"
def addOne(z : Int) = add(1)(z)   // equivalent, fixing the other argument
val addOne = add(1) _             // or by partially applying "add"


------------------------------------------------

def factorial(n : Int) : Int = {
 if(n == 0) 1
 else n * factorial(n - 1)
}

------------------------------------------------
scala> def factorial(n : Int) : Int = {
     |  if(n == 0) 1
     |  else n * factorial(n - 1)
     | }
factorial: (n: Int)Int


scala> factorial(5)
res38: Int = 120

scala> factorial(4)
res39: Int = 24


def factorial(n : Int) : Int = {
 println(n)
 if(n == 1) 1
 else n * factorial(n - 1)
}

scala> def factorial(n : Int) : Int = {
     |  println(n)
     |  if(n == 1) 1
     |  else n * factorial(n - 1)
     | }
factorial: (n: Int)Int

scala> factorial(5)
5
4
3
2
1
res41: Int = 120

------------------------------------------------

def factorial(n : Int) : Int = {
 def fact(x : Int, y: Int) : Int = {
  println(x)
  if(x == y) x
  else x * fact(x + 1, y)
 }
 fact(1,n)
}


scala> def factorial(n : Int) : Int = {
     |  def fact(x : Int, y: Int) : Int = {
     |   println(x)
     |   if(x == y) x
     |   else x * fact(x + 1, y)
     |  }
     |  fact(1,n)
     | }
factorial: (n: Int)Int

scala> factorial(5)
1
2
3
4
5
res44: Int = 120
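
Each recursive call above grows the call stack, so very large inputs would overflow it. A tail-recursive sketch using an accumulator avoids that (@tailrec asks the compiler to verify the tail call):

import scala.annotation.tailrec

def factorial(n : Int) : Int = {
 @tailrec
 def loop(x : Int, acc : Int) : Int =
  if (x <= 1) acc
  else loop(x - 1, acc * x)
 loop(n, 1)
}

// factorial(5) = 120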


------------------------------------------------

package com.orienit.scala.learnings

// case class: the compiler generates an apply() method,
// so "new" is optional; "name" has a default value
case class Student(name: String = "kalyan", id: Int, year: Int)

// regular class: no generated apply(), so "new" is required
class Employee(name: String, id: Int)

object Sample3 extends App {
  val s1 = Student("kalyan", 1, 2016)        // uses the generated apply()
  val s2 = new Student("venkat", 1, 2016)    // "new" also works
  val s3 = new Student(id = 1, year = 2016)  // named args, default name used
  val s4 = new Student(year = 2016, id = 1)  // named args in any order

  val e1 = new Employee("venkat", 1)
  // val e2 = Employee("venkat", 1)  // won't compile: Employee is not a case class

}

