Saturday, February 16, 2019

Different Ways of Writing Scala Functions

Different ways of writing Scala functions:

scala> {x:Int=>x+30}

res0: Int => Int = <function1>

scala> val f2={(x:Int,y:Int)=>(x+y)/(x-y)}

f2: (Int, Int) => Int = <function2>

scala> val f3={(x:String,y:Int)=>x+y.toString}

f3: (String, Int) => String = <function2>

scala> val a:Int=35

a: Int = 35

scala> def f3={(x:String,y:Int)=>x+y.toString}

f3: (String, Int) => String

scala> var b="scala"

b: String = scala

scala> def f4(a:Int,b:Int):Int={
     | val p=a+b
     | val q=a-b
     | val r=a*b
     | p*q*r
     | }

f4: (a: Int, b: Int)Int

scala> f4(2,3)

res1: Int = -30

scala> val f2={(x:Int,y:Int)=>(x+y)/(x-y)}

f2: (Int, Int) => Int = <function2>

scala> f2(4,5)

res3: Int = -9

scala> val f3={(x:String,y:Int)=>x+y.toString}

f3: (String, Int) => String = <function2>

scala> f3("scala",3)

res4: String = scala3

scala> {x:Int=>x+30}

res5: Int => Int = <function1>

scala> res5(20)

res6: Int = 50

scala> val f2={(x:Int,y:Int)=>(x+y)/(x-y)}

f2: (Int, Int) => Int = <function2>

Higher-order function: a function that takes another function as an argument, or returns a function as its result.

scala> def f5={(a:Int,b:Int,c:(Int,Int)=>Int)=>c(a,b)}

f5: (Int, Int, (Int, Int) => Int) => Int

scala> f5(2,3,f2)

res7: Int = -5

scala> f5(2,3,f4)

res9: Int = -30

scala> def f5={(a:Int,b:Int,c:(Int,Int)=>Int)=>c(a,b)}

f5: (Int, Int, (Int, Int) => Int) => Int

scala> def m1(x:Int,y:Int):Int=x+y

m1: (x: Int, y: Int)Int

scala> def m1(x:Int,y:Int):Int={x+y}

m1: (x: Int, y: Int)Int

Partially applied function: each parameter is declared in its own argument list (multiple parameter lists), so the function can be applied one argument list at a time.

scala> def m2(x:Int)(y:Int):Int={x+y}

m2: (x: Int)(y: Int)Int

scala> m1(5,6)

res10: Int = 11

scala> m2(4)

<console>:26: error: missing argument list for method m2

Unapplied methods are only converted to functions when a function type is expected.
You can make this conversion explicit by writing `m2 _` or `m2(_)(_)` instead of `m2`.
       m2(4)
         ^

scala> m2(4)_

res12: Int => Int = <function1>

scala> m2(4)(5)

res14: Int = 9

scala> def m1(x:Int,y:Int):Int={x+y}

m1: (x: Int, y: Int)Int

scala> m1(6,7)

res15: Int = 13

scala> def m3={(a:Int)=>a+20}

m3: Int => Int

scala> val a=Array(2,3,4,5,6)

a: Array[Int] = Array(2, 3, 4, 5, 6)

Built-in function: the lambda x => x+30 is passed as an argument to map, which therefore acts as a higher-order function.

scala> a.map{(x:Int)=>x+30}

res16: Array[Int] = Array(32, 33, 34, 35, 36)

scala> a.map(x=>x+30)

res17: Array[Int] = Array(32, 33, 34, 35, 36)

scala> a.filter(x=>x%2==0)

res18: Array[Int] = Array(2, 4, 6)

scala> a.filter{x:Int=>x%2==0}

res19: Array[Int] = Array(2, 4, 6)

scala> val a2=Array(Array(1,2),Array(2,3),Array(3,4))

a2: Array[Array[Int]] = Array(Array(1, 2), Array(2, 3), Array(3, 4))

scala> a2.flatMap(x=>x)

res20: Array[Int] = Array(1, 2, 2, 3, 3, 4)

scala> class A{
     | val a:Int=20
     | val b:Float=2.2f
     | def m1(a:Int,b:Int):Int={a+b}
     | def m2={(a:Int,b:Int)=>a-b}
     | }
defined class A

When the function is in the class scope, we can access it through an object; if it is outside the class, we can access it directly.

scala> val ob1=new A

ob1: A = A@72fb8a1d

scala> ob1.m1(2,3)

res23: Int = 5

scala> ob1.m2(2,3)

res24: Int = -1

Different ways of function construction: the two forms below look different but behave the same.

scala> def k1(a:Int,b:String)=a+b.toInt

k1: (a: Int, b: String)Int

scala> def k1(a:Int,b:String):Int={a+b.toInt}

k1: (a: Int, b: String)Int

scala> def k2={(x:Int,y:Int)=>x+y}

k2: (Int, Int) => Int

scala> k2(3,4)

res25: Int = 7


Functional programming:

scala> val a1=Array("spark is a big data technology","hadoop is a big data technology","hadoop and spark are big data technologies")

a1: Array[String] = Array(spark is a big data technology, hadoop is a big data technology, hadoop and spark are big data technologies)

scala> a1.size

res26: Int = 3

scala> val a2=a1.map(x=>x.split(" ")).flatMap(x=>x).map(x=>(x,1))

a2: Array[(String, Int)] = Array((spark,1), (is,1), (a,1), (big,1), (data,1), (technology,1), (hadoop,1), (is,1), (a,1), (big,1), (data,1), (technology,1), (hadoop,1), (and,1), (spark,1), (are,1), (big,1), (data,1), (technologies,1))

scala> val a3=a2.groupBy(x=>x._1)

a3: scala.collection.immutable.Map[String,Array[(String, Int)]] = Map(are -> Array((are,1)), is -> Array((is,1), (is,1)), big -> Array((big,1), (big,1), (big,1)), data -> Array((data,1), (data,1), (data,1)), a -> Array((a,1), (a,1)), technologies -> Array((technologies,1)), technology -> Array((technology,1), (technology,1)), hadoop -> Array((hadoop,1), (hadoop,1)), spark -> Array((spark,1), (spark,1)), and -> Array((and,1)))

scala> val a3=a2.groupBy(x=>x._1).map(x=>(x._1,x._2.size))

a3: scala.collection.immutable.Map[String,Int] = Map(are -> 1, is -> 2, big -> 3, data -> 3, a -> 2, technologies -> 1, technology -> 2, hadoop -> 2, spark -> 2, and -> 1)

scala> val x1=List(2,3,4,5,6,7,8)

x1: List[Int] = List(2, 3, 4, 5, 6, 7, 8)

scala> x1.sum

res27: Int = 35

scala> x1.reduce((x,y)=>(x+y))

res29: Int = 35

scala> x1.reduce((x,y)=>(x-y))

res30: Int = -31

scala> x1.max

res31: Int = 8

scala> x1.min

res32: Int = 2

scala> x1.sum/x1.size

res33: Int = 5

scala> x1.reduce((x,y)=>if(x>y) x else y)

res34: Int = 8

scala> x1.reduce((x,y)=>if(x<y) x else y)

res35: Int = 2

scala> val x2=List("a","b","c","d","e","f")

x2: List[String] = List(a, b, c, d, e, f)

scala> x1.zip(x2)

res36: List[(Int, String)] = List((2,a), (3,b), (4,c), (5,d), (6,e), (7,f))

scala> val r1=sc,makeRDD(Array(1,2,3,4,5))

<console>:1: error: ';' expected but ',' found.

val r1=sc,makeRDD(Array(1,2,3,4,5))
         ^
scala> val r1=sc.makeRDD(Array(1,2,3,4,5))

r1: org.apache.spark.rdd.RDD[Int] = ParallelCollectionRDD[0] at makeRDD at <console>:24
                 ^
scala> val f2=sc.makeRDD(Array("a","b","c","d","e"))

f2: org.apache.spark.rdd.RDD[String] = ParallelCollectionRDD[1] at makeRDD at <console>:24
scala> val r2=sc.makeRDD(Array("a","b","c","d","e"))

r2: org.apache.spark.rdd.RDD[String] = ParallelCollectionRDD[2] at makeRDD at <console>:24
scala> r1.zip(r2)

res37: org.apache.spark.rdd.RDD[(Int, String)] = ZippedPartitionsRDD2[3] at zip at <console>:29

scala> r1.zip(r2).collect

res38: Array[(Int, String)] = Array((1,a), (2,b), (3,c), (4,d), (5,e))

scala> val r2=sc.makeRDD(Array("a","b","c","d"))

r2: org.apache.spark.rdd.RDD[String] = ParallelCollectionRDD[5] at makeRDD at <console>:24
scala> r1.zip(r2).collect

19/02/17 02:12:54 ERROR Executor: Exception in task 0.0 in stage 1.0 (TID 1)
org.apache.spark.SparkException: Can only zip RDDs with same number of elements in each partition

scala> x1.zipWithIndex
res40: List[(Int, Int)] = List((2,0), (3,1), (4,2), (5,3), (6,4), (7,5), (8,6))


scala> val a1=Array(1,2,3,4,5,6,7)
a1: Array[Int] = Array(1, 2, 3, 4, 5, 6, 7)

scala> var sum=0
sum: Int = 0

scala> a1.sum   ======> Predefined function
res42: Int = 28

User defined Function:
scala> for(i <- 0 to a1.size-1 by 1){
     | sum=sum+a1(i)
     | }

scala> println(sum)
48
(NOTE: with sum initialized to 0 and a1 = Array(1,2,3,4,5,6,7), a single run of the loop yields 28; the 48 shown suggests sum was not reset before this loop was run — verify against your own session.)

scala> class A{
     | var id =0
     | var name =""
     | }

defined class A

scala> val n1=new A
n1: A = A@3f43c65e

scala> n1.id=20
n1.id: Int = 20

scala> n1.name="spark"
n1.name: String = spark

USER DEFINED FUNCTION
scala> for(i<-0 to a1.size-1 by 1){
     | if(a1(i)%2==0)
     | println(" "+a1(i))
     | }

 2
 4
 6
scala> a1.filter(x=>x%2==0)  ===> filter is a PREDEFINED function, whereas the argument passed to it is a user-defined function

res48: Array[Int] = Array(2, 4, 6)

scala> a1

res49: Array[Int] = Array(1, 2, 3, 4, 5, 6, 7)

scala> for(i<-0 to a1.size-1 by 1)
     | {
     | a1(i)=a1(i)+10
     | }

scala> a1

res51: Array[Int] = Array(11, 12, 13, 14, 15, 16, 17)

scala> val a2=Array(3,4,5,6,7,8)

a2: Array[Int] = Array(3, 4, 5, 6, 7, 8)

scala> a2.map(x=>x+10)

res52: Array[Int] = Array(13, 14, 15, 16, 17, 18)

scala> val r=Range(1,100)

r: scala.collection.immutable.Range = Range(1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, 43, 44, 45, 46, 47, 48, 49, 50, 51, 52, 53, 54, 55, 56, 57, 58, 59, 60, 61, 62, 63, 64, 65, 66, 67, 68, 69, 70, 71, 72, 73, 74, 75, 76, 77, 78, 79, 80, 81, 82, 83, 84, 85, 86, 87, 88, 89, 90, 91, 92, 93, 94, 95, 96, 97, 98, 99)

scala> r.map(x=>x+10)

res53: scala.collection.immutable.IndexedSeq[Int] = Vector(11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, 43, 44, 45, 46, 47, 48, 49, 50, 51, 52, 53, 54, 55, 56, 57, 58, 59, 60, 61, 62, 63, 64, 65, 66, 67, 68, 69, 70, 71, 72, 73, 74, 75, 76, 77, 78, 79, 80, 81, 82, 83, 84, 85, 86, 87, 88, 89, 90, 91, 92, 93, 94, 95, 96, 97, 98, 99, 100, 101, 102, 103, 104, 105, 106, 107, 108, 109)

scala> (5,6)
res57: (Int, Int) = (5,6)

scala> rres55(5,6)
<console>:24: error: not found: value rres55
       rres55(5,6)
       ^
scala> res55(5,6)
res59: Int = -11

scala> a2.map(x=>x+10)
res60: Array[Int] = Array(13, 14, 15, 16, 17, 18)

scala> a2
res61: Array[Int] = Array(3, 4, 5, 6, 7, 8)

scala> a2.map(x=>x+10)
res62: Array[Int] = Array(13, 14, 15, 16, 17, 18)

scala> def f5={x:Int=>x+10}
f5: Int => Int

scala> a2.map(f5)
res63: Array[Int] = Array(13, 14, 15, 16, 17, 18)

scala> a1.map(f5)
res64: Array[Int] = Array(21, 22, 23, 24, 25, 26, 27)

scala> def f6={(x:Int,y:Int)=>
     | val p=x+y
     | val q=x-y
     | val r=x*y
     | val s=x/y
     | p+q+r+s
     | }

f6: (Int, Int) => Int

scala> f6(2,3)

res65: Int = 10

scala> def f7(x:Int,y:Int):Int={
     | val p=x+y
     | val q=x-y
     | val r=x*y
     | val s=x/y
     | p+q+r+s
     | }

f7: (x: Int, y: Int)Int

scala> def m1={(x:Int,y:Int)=>x+y}
m1: (Int, Int) => Int

scala> def m2={(m1:(Int,Int)=>Int,a:Int)=>
     | }
m2: ((Int, Int) => Int, Int) => Unit









No comments:

Post a Comment

Python Challenges Program

Challenges program: program 1: #Input :ABAABBCA #Output: A4B3C1 str1="ABAABBCA" str2="" d={} for x in str1: d[x]=d...