Can somebody please help me with the error below? I am trying to convert a DataFrame to an RDD so that it can be used for building a regression model.
SPARK VERSION : 2.0.0
Error => ClassCastException: org.apache.spark.ml.linalg.DenseVector cannot be cast to org.apache.spark.mllib.linalg.Vector
Code =>
import org.apache.spark.ml.feature.{Binarizer, VectorAssembler}
import org.apache.spark.mllib.classification.LogisticRegressionWithLBFGS
import org.apache.spark.mllib.linalg.Vectors
import org.apache.spark.mllib.regression.LabeledPoint
import org.apache.spark.rdd.RDD
import org.apache.spark.sql._
import org.apache.spark.sql.Row
// Binarize the repayment amount into a 0/1 label: repay_amt > 20.00 => 1.0, else 0.0.
val binarizer2: Binarizer = new Binarizer()
  .setInputCol("repay_amt").setOutputCol("label").setThreshold(20.00)
df = binarizer2.transform(df)

// Assemble the numeric feature columns into a single "features" vector column.
// NOTE: in Spark 2.x, VectorAssembler (spark.ml) produces
// org.apache.spark.ml.linalg.Vector — NOT the old mllib Vector type.
val assembler = new VectorAssembler()
  .setInputCols(Array("tot_txns", "avg_unpaiddue", "max_unpaiddue", "sale_txn", "max_amt", "tot_sale_amt")).setOutputCol("features")
df = assembler.transform(df)

// Round-trip through Parquet (vector type is preserved as ml.linalg.Vector).
df.write.mode(SaveMode.Overwrite).parquet("lazpay_final_data.parquet")
val df2 = spark.read.parquet("lazpay_final_data.parquet/")

// FIX for the ClassCastException: LabeledPoint (spark.mllib) requires an
// org.apache.spark.mllib.linalg.Vector, but the "features" column holds an
// org.apache.spark.ml.linalg.Vector. Reading it with an untyped
// r.getAs("features") lets the compiler infer the wrong type and the cast
// fails at runtime. Read it explicitly as an ml Vector and convert with
// Vectors.fromML (available since Spark 2.0).
val df3 = df2.rdd.map { r =>
  LabeledPoint(
    r.getDouble(0),
    Vectors.fromML(r.getAs[org.apache.spark.ml.linalg.Vector]("features"))
  )
}
Data =>