
Functional Programming in Kotlin with funKTionale

Mario Arias
September 29, 2016

Talk for Manchester Java Community - Sept 28, 2016

Transcript

  1. Software Engineer at Cake Solutions
     10+ years of experience with JVM technologies
     Spring certified trainer
     5+ years with Scala, 3+ years with Kotlin
     funKTionale, KotlinPrimavera, RxKotlin: original developer and team leader*
     NOT an expert on functional programming
     * I hit “Merge” and “Release” buttons
  2. Introduction

     Concept                                   Kotlin
     First class and higher-order functions    Yes
     Pure functions                            Yes*
     Recursion                                 Yes
     Lazy evaluation                           Yes*
     Strong type system                        Yes*
  3. Functions

     // explicit type
     val add2: (Int) -> Int = { i -> i + 2 }

     // inferred type
     val add2 = { i: Int -> i + 2 }
  4. Functions (II)

     /** A function that takes 1 argument. */
     public interface Function1<in P1, out R> : Function<R> {
         /** Invokes the function with the specified argument. */
         public operator fun invoke(p1: P1): R
     }
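     A quick sketch (not from the original deck) showing that a Kotlin lambda is an instance of Function1, so calling it with parentheses is just sugar for invoke(); it reuses add2 from slide 3:

     // add2 is an instance of Function1<Int, Int>
     val add2: (Int) -> Int = { i -> i + 2 }

     fun main(args: Array<String>) {
         println(add2(3))        // 5, syntactic sugar for...
         println(add2.invoke(3)) // ...an explicit call to invoke()
     }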
  5. Functional eye for the imperative guy

     fun factorial(n: Long): Long {
         var result = 1L
         for (it in 1..n) {
             result *= it
         }
         return result
     }

     fun functionalFactorial(n: Long): Long {
         fun go(n: Long, acc: Long): Long {
             return if (n <= 0) {
                 acc
             } else {
                 go(n - 1, n * acc)
             }
         }
         return go(n, 1)
     }

     fun tailrecFactorial(n: Long): Long {
         tailrec fun go(n: Long, acc: Long): Long {
             return if (n <= 0) {
                 acc
             } else {
                 go(n - 1, n * acc)
             }
         }
         return go(n, 1)
     }

     [Benchmark chart, factorial(20)*, times in μs: factorial 0,064; functional 0,092; tailrec 0,078]
     * Calculated with JMH, SampleTime mode
  6. fun fib(n: Long): Long = when (n) {
         0L -> 0
         1L -> 1
         else -> {
             var a = 0L
             var b = 1L
             var c = 0L
             for (it in 2..n) {
                 c = a + b
                 a = b
                 b = c
             }
             c
         }
     }

     fun functionalFib(n: Long): Long {
         fun go(n: Long, prev: Long, cur: Long): Long {
             return if (n == 0L) prev
             else go(n - 1, cur, prev + cur)
         }
         return go(n, 0, 1)
     }

     fun tailrecFib(n: Long): Long {
         tailrec fun go(n: Long, prev: Long, cur: Long): Long {
             return if (n == 0L) prev
             else go(n - 1, cur, prev + cur)
         }
         return go(n, 0, 1)
     }

     [Benchmark chart, fib(93)*, times in μs (axis 0 to 120000): fib 110.028; functional 115.192; tailrec 97.997]
     * Calculated with JMH, SampleTime mode
  7. Function composition

     A technique to create a new function using two existing functions.

     % ps aux | grep java
  8. infix fun <P1, IP, R> Function1<P1, IP>.andThen(f: (IP) -> R): (P1) -> R = forwardCompose(f)

     infix fun <P1, IP, R> Function1<P1, IP>.forwardCompose(f: (IP) -> R): (P1) -> R {
         return { p1: P1 -> f(this(p1)) }
     }

     infix fun <IP, R, P1> Function1<IP, R>.compose(f: (P1) -> IP): (P1) -> R {
         return { p1: P1 -> this(f(p1)) }
     }
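     A minimal usage sketch (not from the original deck) of the composition extensions above; upper, exclaim and shout are illustrative, and the compose import path is assumed to sit alongside andThen in org.funktionale.composition (only the andThen import appears later in the deck):

     import org.funktionale.composition.andThen
     import org.funktionale.composition.compose

     val upper: (String) -> String = { it.toUpperCase() }
     val exclaim: (String) -> String = { "$it!" }

     fun main(args: Array<String>) {
         val shout = upper andThen exclaim   // apply upper first, then exclaim
         val shout2 = exclaim compose upper  // same pipeline, declared right-to-left
         println(shout("hello"))   // HELLO!
         println(shout2("hello"))  // HELLO!
     }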

  9. fun main(args: Array<String>) {
         val conf = SparkConf().setMaster("local").setAppName("My App")
         val sc = JavaSparkContext(conf)
         val split: (String) -> List<String> = { it.split("|") }
         val upper: (String) -> String = { it.toUpperCase() }
         val user: (List<String>) -> User = { User(it[0], it[1].toInt()) }
         val users = sc.textFile("s3://path/to/my-petabyte-file.txt")
             .map(upper)
             .map(split)
             .map(user)

         users.take(20).forEach { println(it) }
     }

     import org.funktionale.composition.andThen

     fun main(args: Array<String>) {
         val conf = SparkConf().setMaster("local").setAppName("My App")
         val sc = JavaSparkContext(conf)
         val split: (String) -> List<String> = { it.split("|") }
         val upper: (String) -> String = { it.toUpperCase() }
         val user: (List<String>) -> User = { User(it[0], it[1].toInt()) }
         val users = sc.textFile("s3://path/to/my-petabyte-file.txt")
             .map(upper andThen split andThen user)

         users.take(20).forEach { println(it) }
     }

     First version: each map() transformation could potentially be distributed across nodes/partitions*.
     Second version: just one map() transformation composed of several functions.
     * Yes, Apache Spark is compatible with Kotlin
  10. Currying

     Transforming a function of arity n into a sequence of n functions with arity 1

     (x, y, z) => r
     (x) => (y) => (z) => r

     https://wiki.haskell.org/Haskell_Brooks_Curry
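     A minimal hand-rolled sketch (not from the original deck) of the same idea in plain Kotlin, before funKTionale's curried() extension is introduced on the next slide; volume and curriedVolume are illustrative only:

     // uncurried: arity 3
     val volume: (Int, Int, Int) -> Int = { x, y, z -> x * y * z }

     // curried by hand: three nested arity-1 functions
     val curriedVolume: (Int) -> (Int) -> (Int) -> Int =
         { x -> { y -> { z -> x * y * z } } }

     fun main(args: Array<String>) {
         println(volume(2, 3, 4))        // 24
         println(curriedVolume(2)(3)(4)) // 24
     }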
  11. fun <P1, P2, R> Function2<P1, P2, R>.curried(): (P1) -> (P2) -> R {
         return { p1: P1 -> { p2: P2 -> this(p1, p2) } }
     }

     fun <P1, P2, P3, R> Function3<P1, P2, P3, R>.curried(): (P1) -> (P2) -> (P3) -> R {
         return { p1: P1 -> { p2: P2 -> { p3: P3 -> this(p1, p2, p3) } } }
     }

     // … all the way to Function22
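     A small usage sketch (not from the original deck) of the curried() extension; labeling and markAsSpam are illustrative, and the import path is the one shown on the next slide:

     import org.funktionale.currying.curried

     val labeling = { label: Double, email: String -> "$label: $email" }

     fun main(args: Array<String>) {
         val curried = labeling.curried()  // (Double) -> (String) -> String
         val markAsSpam = curried(0.0)     // fix the first argument
         println(markAsSpam("buy now!!!")) // 0.0: buy now!!!
     }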

  12. fun main(args: Array<String>) {
         val conf = SparkConf().setMaster("local[*]").setAppName("ML")
         val sc = JavaSparkContext(conf)

         val spam = sc.textFile("spam.txt")
         val ham = sc.textFile("ham.txt")

         val tf = HashingTF(10000)

         val posExamples = ham.map { LabeledPoint(1.0, tf.transform(listOf(it.split(" ")))) }
         val negExamples = spam.map { LabeledPoint(0.0, tf.transform(listOf(it.split(" ")))) }
         val trainData = posExamples.union(negExamples)
         trainData.cache()
         val model = LogisticRegressionWithLBFGS().run(trainData.rdd())
     }

     import org.funktionale.currying.curried

     fun main(args: Array<String>) {
         val conf = SparkConf().setMaster("local[*]").setAppName("ML")
         val sc = JavaSparkContext(conf)

         val spam = sc.textFile("spam.txt")
         val ham = sc.textFile("ham.txt")

         val tf = HashingTF(10000)

         val labeling = { label: Double, email: String ->
             LabeledPoint(label, tf.transform(listOf(email.split(" "))))
         }
         val curried = labeling.curried()
         val posExamples = ham.map(curried(1.0))
         val negExamples = spam.map(curried(0.0))
         val trainData = posExamples.union(negExamples)
         trainData.cache()
         val model = LogisticRegressionWithLBFGS().run(trainData.rdd())
     }
  13. Partially applied functions

     Calling a function with fewer parameters than the function’s arity (fixing parameters) will return a new function with a smaller arity

     f(x, y, z)
     f(1, 2) => g(z)
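     A minimal plain-Kotlin sketch (not from the original deck) of the idea, before funKTionale's partiallyN extensions on the next slide; add and addOneAndTwo are illustrative only:

     val add: (Int, Int, Int) -> Int = { x, y, z -> x + y + z }

     // fix x = 1 and y = 2 by hand; the result only needs z
     fun addOneAndTwo(): (Int) -> Int = { z -> add(1, 2, z) }

     fun main(args: Array<String>) {
         val g = addOneAndTwo()
         println(g(3)) // 6
     }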
  14. fun <P1, P2, R> Function2<P1, P2, R>.partially1(p1: P1): (P2) -> R {
         return { p2: P2 -> this(p1, p2) }
     }

     fun <P1, P2, R> Function2<P1, P2, R>.partially2(p2: P2): (P1) -> R {
         return { p1: P1 -> this(p1, p2) }
     }

     fun <P1, P2, P3, R> Function3<P1, P2, P3, R>.partially1(p1: P1): (P2, P3) -> R {
         return { p2: P2, p3: P3 -> this(p1, p2, p3) }
     }

     fun <P1, P2, P3, R> Function3<P1, P2, P3, R>.partially2(p2: P2): (P1, P3) -> R {
         return { p1: P1, p3: P3 -> this(p1, p2, p3) }
     }

     fun <P1, P2, P3, R> Function3<P1, P2, P3, R>.partially3(p3: P3): (P1, P2) -> R {
         return { p1: P1, p2: P2 -> this(p1, p2, p3) }
     }

     // All the way to Function22
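     A small usage sketch (not from the original deck) of the partiallyN extensions above; greet, hello and politely are illustrative, and the wildcard import is the one shown on slide 17:

     import org.funktionale.partials.*

     val greet: (String, String, String) -> String =
         { greeting, name, suffix -> "$greeting, $name$suffix" }

     fun main(args: Array<String>) {
         val hello = greet.partially1("Hello") // (String, String) -> String
         val politely = greet.partially3("!")  // (String, String) -> String
         println(hello("Manchester", "!"))     // Hello, Manchester!
         println(politely("Hi", "Kotlin"))     // Hi, Kotlin!
     }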
  15. fun <P1, P2, R> Function2<P1, P2, R>.invoke(p1: P1, partial2: Partial<P2> = partial()): (P2) -> R {
         return { p2: P2 -> this(p1, p2) }
     }

     fun <P1, P2, R> Function2<P1, P2, R>.invoke(partial1: Partial<P1> = partial(), p2: P2): (P1) -> R {
         return { p1: P1 -> this(p1, p2) }
     }

     fun <P1, P2, P3, R> Function3<P1, P2, P3, R>.invoke(p1: P1, partial2: Partial<P2> = partial(), partial3: Partial<P3> = partial()): (P2, P3) -> R {
         return { p2: P2, p3: P3 -> this(p1, p2, p3) }
     }

     fun <P1, P2, P3, R> Function3<P1, P2, P3, R>.invoke(partial1: Partial<P1> = partial(), p2: P2, partial3: Partial<P3> = partial()): (P1, P3) -> R {
         return { p1: P1, p3: P3 -> this(p1, p2, p3) }
     }

     fun <P1, P2, P3, R> Function3<P1, P2, P3, R>.invoke(partial1: Partial<P1> = partial(), partial2: Partial<P2> = partial(), p3: P3): (P1, P2) -> R {
         return { p1: P1, p2: P2 -> this(p1, p2, p3) }
     }

     // All the way to Function22… This file has 2532 lines
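     A minimal usage sketch (not from the original deck) of the named-parameter invoke overloads above, mirroring the mapper(p3 = "dr") call on slide 17; greet and hello are illustrative, and the parameter name p1 follows the signatures shown above:

     import org.funktionale.partials.*

     val greet: (String, String) -> String = { greeting, name -> "$greeting, $name" }

     fun main(args: Array<String>) {
         // fixing p1 returns a (String) -> String that still expects the name
         val hello = greet(p1 = "Hello")
         println(hello("Manchester")) // Hello, Manchester
     }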
 

  16. doctors
     dr_id  dr_name  dr_age
     1      Carlos   37
     2      Laura    28
     3      Ari      25

     patients
     p_id   p_name   p_age
     1      Andy     55
     2      Riba     26
     3      Joanie   33
     5      Ben      32
     6      Pete     45

     nurses
     n_id   n_name   n_age
     1      Diana    33
     2      Sarah    28
     3      Liz      26
  17. import org.kotlinprimavera.jdbc.core.extract

     val doctors = template.query("select * from doctors") { rs, i ->
         rs.extract { // DSL from KotlinPrimavera
             User(string["dr_name"]!!, int["dr_age"]!!)
         }
     }

     val nurses = template.query("select * from nurses") { rs, i ->
         rs.extract {
             User(string["n_name"]!!, int["n_age"]!!)
         }
     }

     val patients = template.query("select * from patients") { rs, i ->
         rs.extract {
             User(string["p_name"]!!, int["p_age"]!!)
         }
     }

     import org.kotlinprimavera.jdbc.core.extract
     import org.funktionale.partials.*

     val mapper: (ResultSet, Int, String) -> User = { rs, i, prefix ->
         rs.extract { // DSL from KotlinPrimavera
             User(string["${prefix}_name"]!!, int["${prefix}_age"]!!)
         }
     }

     val doctors = template.query("select * from doctors", mapper(p3 = "dr"))

     val nurses = template.query("select * from nurses", mapper.partially3("n"))

     val patients = template.query("select * from patients", mapper(p3 = "p"))
  18. Option

     “Don’t stain my null-safe language with your monads” - No one, never*

     * For some definitions of “No one” and “never”
  19. Option is a type that represents the existence or absence of a meaningful value

     Examples of a meaningful value:
     • A successful operation (no exceptions)
     • An existent value (a record in the DB)
     • A useful value (a non-empty list)
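     A minimal sketch (not from the original deck) of how an Option-returning lookup reads; findUser and the users map are illustrative, and the import path is an assumption based on the other funKTionale packages in this deck (the Option imports are not shown):

     import org.funktionale.option.*

     val users = mapOf(1 to "Carlos", 2 to "Laura")

     // None when the record does not exist, Some(name) when it does
     fun findUser(id: Int): Option<String> = users[id].toOption()

     fun main(args: Array<String>) {
         println(findUser(1).getOrElse { "unknown" }) // Carlos
         println(findUser(9).getOrElse { "unknown" }) // unknown
     }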
  20. Examples without Option

     Representation                      Example                                            Problems
     A value of the same type,           indexOf(x) will return -1 if x doesn’t exist       • Checking is not mandatory
     defined by convention               in the structure (Array, List)                     • Based on oral tradition
     An exception                        Spring’s JdbcTemplate will throw an                • Runtime exception
                                         EmptyResultDAE if no record is available*          • Exception-based logic
     null                                Hibernate will return null if no record            • is null
                                         is available

     * I kinda like it
  21. Option operations

     • map*
     • fold
     • flatMap*
     • filter*
     • filterNot
     • exists
     • forEach~
     • get*
     • getOrElse~
     • orElse*

     fun getSome(): Option<String> = "kotlin".toOption()

     fun getNone(): Option<String> = null.toOption()

     @Test fun option() {
         val option = getSome()
         when (option) {
             is Some<String> -> assertEquals(option.get(), "kotlin")
             is None -> fail()
         }

         val otherOption = getNone()
         when (otherOption) {
             is Some<String> -> fail()
             is None -> assertEquals(otherOption, None)
         }
     }

     @Test fun getOrElse() {
         assertEquals(getSome().getOrElse { "java" }, "kotlin")
         assertEquals(getNone().getOrElse { "java" }, "java")
     }

     @Test fun orNull() {
         assertNotNull(getSome().orNull())
         assertNull(getNone().orNull())
     }

     @Test fun map() {
         assertEquals(getSome().map { it.toUpperCase() }.get(), "KOTLIN")
         assertEquals(getNone().map { it.toUpperCase() }, None)
     }
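     A sketch (not from the original deck) of two more operations from the list above, filter and exists, in the same test style; the exact funKTionale signatures are not shown in the deck, so the conventional predicate-taking forms are assumed here:

     @Test fun filter() {
         assertEquals(getSome().filter { it.startsWith("k") }.get(), "kotlin")
         assertEquals(getSome().filter { it.startsWith("j") }, None)
         assertEquals(getNone().filter { it.startsWith("k") }, None)
     }

     @Test fun exists() {
         assertTrue(getSome().exists { it.length == 6 })
         assertFalse(getNone().exists { it.length == 6 })
     }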
  22. fun divide(num: Int, den: Int): Option<Int> {
         return if (num % den != 0) {
             None
         } else {
             Some(num / den)
         }
     }

     fun division(a: Int, b: Int, c: Int): Option<Pair<Int, Int>> {
         val ac = divide(a, c)
         return when (ac) {
             is Some<Int> -> {
                 val bc = divide(b, c)
                 when (bc) {
                     is Some<Int> -> {
                         Some(ac.get() to bc.get())
                     }
                     else -> None
                 }
             }
             else -> None
         }
     }

     An example based on Ken Barclay’s post: http://kenbarclay.blogspot.co.uk/2014/02/kotlin-option-type-2.html
     The division function lets me check whether two numbers (a, b) are divisible by a third one (c).
     Even if ugly, this is still possible with -1, exceptions or null.
  23. division with flatMap

     fun division(a: Int, b: Int, c: Int): Option<Pair<Int, Int>> {
         return divide(a, c).flatMap { ac ->
             divide(b, c).flatMap { bc ->
                 Some(ac to bc)
             }
         }
     }

     fun division(a: Int, b: Int, c: Int): Option<Pair<Int, Int>> {
         val ac = divide(a, c)
         return when (ac) {
             is Some<Int> -> {
                 val bc = divide(b, c)
                 when (bc) {
                     is Some<Int> -> {
                         Some(ac.get() to bc.get())
                     }
                     else -> None
                 }
             }
             else -> None
         }
     }

     The outer when on ac is replaced by the 1st flatMap; the inner when on bc is replaced by the 2nd flatMap.
  24. Overloading division

     fun division(a: Int, b: Int, c: Int): Option<Pair<Int, Int>> {
         return divide(a, c).flatMap { ac ->
             divide(b, c).flatMap { bc ->
                 Pair(ac, bc).toOption()
             }
         }
     }

     fun division(a: Int, b: Int, c: Int, d: Int): Option<Triple<Int, Int, Int>> {
         return divide(a, d).flatMap { ad ->
             divide(b, d).flatMap { bd ->
                 divide(c, d).flatMap { cd ->
                     Triple(ad, bd, cd).toOption()
                 }
             }
         }
     }