Spark Binomial Logistic Regression (Binary Classification)
package Spark_MLlib

import org.apache.spark.ml.Pipeline
import org.apache.spark.ml.classification.{BinaryLogisticRegressionSummary, LogisticRegression, LogisticRegressionModel}
import org.apache.spark.ml.evaluation.MulticlassClassificationEvaluator
import org.apache.spark.ml.feature.{IndexToString, StringIndexer, VectorIndexer}
import org.apache.spark.sql.SparkSession
import org.apache.spark.ml.linalg.{Vector, Vectors}
import org.apache.spark.sql.functions

case class data_schema(features:Vector,label:String)
object 二项逻辑回归__二分类 {
val spark=SparkSession.builder().master("local").getOrCreate()
import spark.implicits._ // enables implicit conversion of an RDD to a DataFrame
def main(args: Array[String]): Unit = {
val df = spark.sparkContext.textFile("file:///home/soyo/桌面/spark编程测试数据/soyo.txt")
.map(_.split(",")).map(x=>data_schema(Vectors.dense(x(0).toDouble,x(1).toDouble,x(2).toDouble,x(3).toDouble),x(4))).toDF()
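// A sample soyo.txt row, inferred from the split/parse above and the printed rows below
// (the file itself is not shown, so this format is an assumption):
// 4.4,2.9,1.4,0.2,soyo1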
df.show()
df.createOrReplaceTempView("data_schema")
val df_data=spark.sql("select * from data_schema where label !='soyo2'") // soyo2 must be wrapped in single quotes here, otherwise the query throws an error
// df_data.map(x=>x(1)+":"+x(0)).collect().foreach(println)
df_data.show()
val labelIndexer=new StringIndexer().setInputCol("label").setOutputCol("indexedLabel").fit(df_data)
val featureIndexer=new VectorIndexer().setInputCol("features").setOutputCol("indexedFeatures").fit(df_data) // builds category indices for the columns of the feature vector
val Array(trainData,testData)=df_data.randomSplit(Array(0.7,0.3))
val lr=new LogisticRegression().setLabelCol("indexedLabel").setFeaturesCol("indexedFeatures").setMaxIter(10).setRegParam(0.5).setElasticNetParam(0.8) // setMaxIter: maximum iterations (10 assumed here), setRegParam: regularization parameter, setElasticNetParam(0.8): elastic-net mixing parameter; setFamily("multinomial") would select multinomial logistic regression, leaving setFamily unset gives binomial logistic regression
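// Note (not in the original code): setFamily defaults to "auto", which resolves to binomial when the label has two classes;
// .setFamily("binomial") could be added to make that choice explicit.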
val labelConverter=new IndexToString().setInputCol("prediction").setOutputCol("predictionLabel").setLabels(labelIndexer.labels)
val lrPipeline=new Pipeline().setStages(Array(labelIndexer,featureIndexer,lr,labelConverter))
val lrPipeline_Model=lrPipeline.fit(trainData)
val lrPrediction=lrPipeline_Model.transform(testData)
lrPrediction.show(false)
// lrPrediction.take(100).foreach(println)
// Model evaluation
val evaluator=new MulticlassClassificationEvaluator().setLabelCol("indexedLabel").setPredictionCol("prediction").setMetricName("accuracy")
val lrAccuracy=evaluator.evaluate(lrPrediction)
println("准确率为: "+lrAccuracy)
val lrError=-lrAccuracy
println("错误率为: "+lrError)
val LRmodel=lrPipeline_Model.stages(2).asInstanceOf[LogisticRegressionModel]
println("Coefficient vector of the binomial logistic regression model: "+LRmodel.coefficients)
println("Intercept of the binomial logistic regression model: "+LRmodel.intercept)
println("Number of classes (possible label values): "+LRmodel.numClasses)
println("Number of features the model accepts: "+LRmodel.numFeatures)
// The model summary currently only supports binomial logistic regression; multinomial logistic regression is not supported (this uses Spark 2.2.0)
println(LRmodel.hasSummary)
val trainingSummary = LRmodel.summary
// Loss history: the loss decreases over the iterations; the smaller the loss, the better the fit
val objectiveHistory =trainingSummary.objectiveHistory
objectiveHistory.foreach(println)
// Cast to BinaryLogisticRegressionSummary
val binarySummary= trainingSummary.asInstanceOf[BinaryLogisticRegressionSummary]
// Area under the ROC curve; the closer to 1, the better the model
val area_ROC=binarySummary.areaUnderROC
println("ROC 曲线下的面积为: "+area_ROC)
//fMeasureByThreshold:返回一个带有beta = 1.0的两个字段(阈值,f - measure)曲线的dataframe
val fMeasure=binarySummary.fMeasureByThreshold
println("fMeasure的行数: "+fMeasure.collect().length)
fMeasure.show()
val maxFMeasure=fMeasure.select(functions.max("F-Measure")).head().getDouble(0)
println("Maximum F-Measure: "+maxFMeasure)
// Optimal threshold
val bestThreshold=fMeasure.where($"F-Measure"===maxFMeasure).select("threshold").head().getDouble(0)
println("Optimal threshold: "+bestThreshold)
/* This does NOT give the optimal threshold:
val s=fMeasure.select(functions.max("threshold")).head().getDouble(0)
println(s)
*/
LRmodel.setThreshold(bestThreshold)
}
}
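The fitted pipeline bundles the two indexers, the logistic regression model, and the label converter, so it can be persisted and reloaded as a single unit. The following is a minimal sketch of that, not part of the original program; the save path is hypothetical.

// Persist the fitted pipeline (indexers + LR model + label converter) and load it back later.
import org.apache.spark.ml.PipelineModel
lrPipeline_Model.write.overwrite().save("file:///tmp/lr_pipeline_model")
val reloaded = PipelineModel.load("file:///tmp/lr_pipeline_model")
reloaded.transform(testData).show(false)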
Results:
+-----------------+-----+------------+------------------+--------------------------------------------+----------------------------------------+----------+---------------+
|features |label|indexedLabel|indexedFeatures |rawPrediction |probability |prediction|predictionLabel|
+-----------------+-----+------------+------------------+--------------------------------------------+----------------------------------------+----------+---------------+
|[4.4,2.9,1.4,0.2]|soyo1|0.0 |[4.4,2.9,1.4,1.0] |[0.0690256519103008,-0.0690256519103008] |[0.5172495646670774,0.48275043533292256]|0.0 |soyo1 |
|[4.4,3.0,1.3,0.2]|soyo1|0.0 |[4.4,3.0,1.3,1.0] |[0.07401171769156373,-0.07401171769156373] |[0.518494487869238,0.481505512130762] |0.0 |soyo1 |
|[4.6,3.1,1.5,0.2]|soyo1|0.0 |[4.6,3.1,1.5,1.0] |[0.06403958612903785,-0.06403958612903785] |[0.5160044273015656,0.48399557269843435]|0.0 |soyo1 |
|[4.6,3.2,1.4,0.2]|soyo1|0.0 |[4.6,3.2,1.4,1.0] |[0.0690256519103008,-0.0690256519103008] |[0.5172495646670774,0.48275043533292256]|0.0 |soyo1 |
|[4.6,3.6,1.0,0.2]|soyo1|0.0 |[4.6,3.6,1.0,1.0] |[0.08896991503535255,-0.08896991503535255] |[0.5222278183980882,0.4777721816019118] |0.0 |soyo1 |
|[4.8,3.0,1.4,0.1]|soyo1|0.0 |[4.8,3.0,1.4,0.0] |[0.0690256519103008,-0.0690256519103008] |[0.5172495646670774,0.48275043533292256]|0.0 |soyo1 |
|[4.9,2.5,4.5,1.7]|soyo3|1.0 |[4.9,2.5,4.5,9.0] |[-0.08554238730885033,0.08554238730885033] |[0.47862743439605193,0.5213725656039481]|1.0 |soyo3 |
|[5.0,3.0,1.6,0.2]|soyo1|0.0 |[5.0,3.0,1.6,1.0] |[0.059053520347774904,-0.059053520347774904]|[0.5147590911988562,0.48524090880114373]|0.0 |soyo1 |
|[5.1,3.5,1.4,0.3]|soyo1|0.0 |[5.1,3.5,1.4,2.0] |[0.0690256519103008,-0.0690256519103008] |[0.5172495646670774,0.48275043533292256]|0.0 |soyo1 |
|[5.1,3.8,1.6,0.2]|soyo1|0.0 |[5.1,3.8,1.6,1.0] |[0.059053520347774904,-0.059053520347774904]|[0.5147590911988562,0.48524090880114373]|0.0 |soyo1 |
|[5.3,3.7,1.5,0.2]|soyo1|0.0 |[5.3,3.7,1.5,1.0] |[0.06403958612903785,-0.06403958612903785] |[0.5160044273015656,0.48399557269843435]|0.0 |soyo1 |
|[5.4,3.7,1.5,0.2]|soyo1|0.0 |[5.4,3.7,1.5,1.0] |[0.06403958612903785,-0.06403958612903785] |[0.5160044273015656,0.48399557269843435]|0.0 |soyo1 |
|[5.4,3.9,1.7,0.4]|soyo1|0.0 |[5.4,3.9,1.7,3.0] |[0.05406745456651198,-0.05406745456651198] |[0.5135135717949689,0.486486428205031] |0.0 |soyo1 |
|[5.7,3.8,1.7,0.3]|soyo1|0.0 |[5.7,3.8,1.7,2.0] |[0.05406745456651198,-0.05406745456651198] |[0.5135135717949689,0.486486428205031] |0.0 |soyo1 |
|[5.8,2.8,5.1,2.4]|soyo3|1.0 |[5.8,2.8,5.1,16.0]|[-0.11545878199642795,0.11545878199642795] |[0.4711673274353307,0.5288326725646694] |1.0 |soyo3 |
|[5.8,4.0,1.2,0.2]|soyo1|0.0 |[5.8,4.0,1.2,1.0] |[0.07899778347282668,-0.07899778347282668] |[0.5197391814925231,0.480260818507477] |0.0 |soyo1 |
|[6.1,3.0,4.9,1.8]|soyo3|1.0 |[6.1,3.0,4.9,10.0]|[-0.10548665043390212,0.10548665043390212] |[0.4736527642876721,0.5263472357123279] |1.0 |soyo3 |
|[6.3,2.7,4.9,1.8]|soyo3|1.0 |[6.3,2.7,4.9,10.0]|[-0.10548665043390212,0.10548665043390212] |[0.4736527642876721,0.5263472357123279] |1.0 |soyo3 |
|[6.3,2.9,5.6,1.8]|soyo3|1.0 |[6.3,2.9,5.6,10.0]|[-0.14038911090274264,0.14038911090274264] |[0.46496025354157383,0.5350397464584261]|1.0 |soyo3 |
|[6.5,3.0,5.5,1.8]|soyo3|1.0 |[6.5,3.0,5.5,10.0]|[-0.13540304512147971,0.13540304512147971] |[0.4662008623530858,0.5337991376469143] |1.0 |soyo3 |
+-----------------+-----+------------+------------------+--------------------------------------------+----------------------------------------+----------+---------------+
only showing top 20 rows
Accuracy: 1.0
Error rate: 0.0
Coefficient vector of the binomial logistic regression model: [0.0,0.0,0.0498606578126294,-0.0]
Intercept of the binomial logistic regression model: -0.13883057284798195
Number of classes (possible label values): 2
Number of features the model accepts: 4
true
0.6927819059876479
0.6921535505946383
0.6902127176671448
0.6898394130469451
0.689535794969328
0.6894009255584304
0.6893497986701255
0.689265433291139
0.6887228224555286
0.6895877386375889
0.6872109190567809
Area under the ROC curve: 1.0
Number of rows in fMeasure: 26
+-------------------+-------------------+
| threshold| F-Measure|
+-------------------+-------------------+
| 0.5511227178429281|0.05128205128205127|
| 0.5486545095952616| 0.1|
| 0.547419499422364|0.14634146341463414|
| 0.5449477416103359| 0.1904761904761905|
| 0.5412359859690851| 0.2727272727272727|
| 0.5399976958289747|0.34782608695652173|
| 0.5387589116841329|0.38297872340425526|
| 0.5375196486465557| 0.4799999999999999|
| 0.5362799218518347| 0.5098039215686275|
| 0.5350397464584261| 0.6428571428571429|
| 0.5337991376469143| 0.6896551724137931|
| 0.5325581106192748| 0.7333333333333334|
| 0.5313166805981351| 0.7741935483870968|
| 0.5300748628260323| 0.8125000000000001|
| 0.5288326725646694| 0.9142857142857143|
| 0.5275901250941695| 0.958904109589041|
| 0.5263472357123279| 0.972972972972973|
| 0.5251040197338624| 1.0|
| 0.4889779551275146| 0.9743589743589743|
| 0.486486428205031| 0.9500000000000001|
|0.48524090880114373| 0.8941176470588235|
|0.48399557269843435| 0.7916666666666666|
|0.48275043533292256| 0.7307692307692308|
| 0.481505512130762| 0.6909090909090909|
| 0.480260818507477| 0.6846846846846847|
|0.47901636986720014| 0.6785714285714285|
+-------------------+-------------------+
Maximum F-Measure: 1.0
Optimal threshold: 0.5251040197338624
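Besides fMeasureByThreshold, the BinaryLogisticRegressionSummary used above also exposes per-threshold precision and recall, the ROC curve, and the precision-recall curve as DataFrames. A minimal sketch (not in the original post), reusing the binarySummary value from the code above:

// Per-threshold precision/recall and the ROC/PR curves from the same training summary.
binarySummary.precisionByThreshold.show(5)  // columns: threshold, precision
binarySummary.recallByThreshold.show(5)     // columns: threshold, recall
binarySummary.roc.show(5)                   // columns: FPR, TPR
binarySummary.pr.show(5)                    // columns: recall, precision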