Add the configuration file phoenixConnectMode.scala:

package statistics.benefits

import org.apache.hadoop.conf.Configuration
import org.apache.spark.sql.{DataFrame, SQLContext}
import org.apache.phoenix.spark._

object phoenixConnectMode {

  // ZooKeeper quorum of the HBase cluster that Phoenix runs on
  private val zookeeper = "node3:2181"

  // Load a Phoenix table as a DataFrame, reading only the given columns.
  def getMode1(sqlContext: SQLContext, tableName: String, columns: Array[String]): DataFrame = {
    val configuration = new Configuration()
    // Match the server-side namespace-mapping settings; otherwise the
    // connection fails when Phoenix schemas are mapped to HBase namespaces.
    configuration.set("phoenix.schema.isNamespaceMappingEnabled", "true")
    configuration.set("phoenix.schema.mapSystemTablesToNamespace", "true")
    configuration.set("hbase.zookeeper.quorum", zookeeper)
    sqlContext.phoenixTableAsDataFrame(tableName, columns, conf = configuration)
  }
}
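A note on filtering: phoenix-spark can also push a WHERE-clause fragment down to the server through the optional predicate argument of phoenixTableAsDataFrame, so unwanted rows are never shipped to Spark. The variant below is a sketch that could sit next to getMode1 inside the same object; the exact parameter list differs slightly between phoenix-spark versions, so treat it as an assumption to verify against your version:

  // Variant of getMode1 that pushes a WHERE-clause fragment
  // (e.g. "USE_DEPARTMENT_ID IS NOT NULL") down to Phoenix,
  // filtering on the server instead of in Spark.
  def getMode1Where(sqlContext: SQLContext, tableName: String,
                    columns: Array[String], whereClause: String): DataFrame = {
    val configuration = new Configuration()
    configuration.set("phoenix.schema.isNamespaceMappingEnabled", "true")
    configuration.set("phoenix.schema.mapSystemTablesToNamespace", "true")
    configuration.set("hbase.zookeeper.quorum", zookeeper)
    sqlContext.phoenixTableAsDataFrame(
      tableName, columns, predicate = Some(whereClause), conf = configuration)
  }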

The Spark SQL source file costDay.scala:

package statistics

import common.util.timeUtil
import org.apache.spark.{SparkConf, SparkContext}
import org.apache.spark.sql.SQLContext
import org.apache.spark.sql.functions.col
import statistics.benefits.phoenixConnectMode

/*
Runs once per day.
 */
object costDay {
  def main(args: Array[String]): Unit = {

    val conf = new SparkConf()
      .setAppName("costDay")
      .setMaster("local")
    val sc = new SparkContext(conf)
    val sqlContext = new SQLContext(sc)

//    val df = sqlContext.load(
//      "org.apache.phoenix.spark"
//      , Map("table" -> "ASSET_NORMAL"
//        , "zkUrl" -> "node3,node4,node5:2181")
//    )
val tableName = "ASSET_NORMAL"
    val columes = Array(
      "ID",
      "ASSET_ID",
      "ASSET_NAME",
      "ASSET_FIRST_DEGREE_ID",
      "ASSET_FIRST_DEGREE_NAME",
      "ASSET_SECOND_DEGREE_ID",
      "ASSET_SECOND_DEGREE_NAME",
      "GB_DEGREE_ID",
      "GB_DEGREE_NAME",
      "ASSET_USE_FIRST_DEGREE_ID",
      "ASSET_USE_FIRST_DEGREE_NAME",
      "ASSET_USE_SECOND_DEGREE_ID",
      "ASSET_USE_SECOND_DEGREE_NAME",
      "MANAGEMENT_TYPE_ID",
      "MANAGEMENT_TYPE_NAME",
      "ASSET_MODEL",
      "FACTORY_NUMBER",
      "ASSET_COUNTRY_ID",
      "ASSET_COUNTRY_NAME",
      "MANUFACTURER",
      "SUPPLIER",
      "SUPPLIER_TEL",
      "ORIGINAL_VALUE",
      "USE_DEPARTMENT_ID",
      "USE_DEPARTMENT_NAME",
      "USER_ID",
      "USER_NAME",
      "ASSET_LOCATION_OF_PARK_ID",
      "ASSET_LOCATION_OF_PARK_NAME",
      "ASSET_LOCATION_OF_BUILDING_ID",
      "ASSET_LOCATION_OF_BUILDING_NAME",
      "ASSET_LOCATION_OF_ROOM_ID",
      "ASSET_LOCATION_OF_ROOM_NUMBER",
      "PRODUCTION_DATE",
      "ACCEPTANCE_DATE",
      "REQUISITION_DATE",
      "PERFORMANCE_INDEX",
      "ASSET_STATE_ID",
      "ASSET_STATE_NAME",
      "INSPECTION_TYPE_ID",
      "INSPECTION_TYPE_NAME",
      "SEAL_DATE",
      "SEAL_CAUSE",
      "COST_ITEM_ID",
      "COST_ITEM_NAME",
      "ITEM_COMMENTS",
      "UNSEAL_DATE",
      "SCRAP_DATE",
      "PURCHASE_NUMBER",
      "WARRANTY_PERIOD",
      "DEPRECIABLE_LIVES_ID",
      "DEPRECIABLE_LIVES_NAME",
      "MEASUREMENT_UNITS_ID",
      "MEASUREMENT_UNITS_NAME",
      "ANNEX",
      "REMARK",
      "ACCOUNTING_TYPE_ID",
      "ACCOUNTING_TYPE_NAME",
      "SYSTEM_TYPE_ID",
      "SYSTEM_TYPE_NAME",
      "ASSET_ID_PARENT",
      "CLASSIFIED_LEVEL_ID",
      "CLASSIFIED_LEVEL_NAME",
      "ASSET_PICTURE",
      "MILITARY_SPECIAL_CODE",
      "CHECK_CYCLE_ID",
      "CHECK_CYCLE_NAME",
      "CHECK_DATE",
      "CHECK_EFFECTIVE_DATE",
      "CHECK_MODE_ID",
      "CHECK_MODE_NAME",
      "CHECK_DEPARTMENT_ID",
      "CHECK_DEPARTMENT_NAME",
      "RENT_STATUS_ID",
      "RENT_STATUS_NAME",
      "STORAGE_TIME",
      "UPDATE_USER",
      "UPDATE_TIME",
      "IS_ON_PROCESS",
      "IS_DELETED",
      "FIRST_DEPARTMENT_ID",
      "FIRST_DEPARTMENT_NAME",
      "SECOND_DEPARTMENT_ID",
      "SECOND_DEPARTMENT_NAME",
      "CREATE_USER",
      "CREATE_TIME"
    )
    val df = phoenixConnectMode.getMode1(sqlContext, tableName, columns)
      .filter(col("USE_DEPARTMENT_ID").isNotNull)
    df.registerTempTable("asset_normal")
    //    df.show(false)

    // Daily depreciation while the asset is still within its depreciable
    // life: spread 95% of the original value evenly over the life in days.
    def costingWithin(originalValue: Double, years: Int): Double = (originalValue * 0.95) / (years * 365)
    sqlContext.udf.register("costingWithin", costingWithin _)

    // Daily cost after the depreciable life has ended: amortize the
    // remaining 5% residual value over each further year.
    def costingBeyond(originalValue: Double): Double = originalValue * 0.05 / 365
    sqlContext.udf.register("costingBeyond", costingBeyond _)

    // True while the asset is still within its depreciable life, i.e.
    // acceptance date plus the life in years is still in the future.
    def expire(acceptanceDate: String, years: Int): Boolean =
      timeUtil.dateStrAddYears2TimeStamp(acceptanceDate, timeUtil.SECOND_TIME_FORMAT, years) > System.currentTimeMillis()
    sqlContext.udf.register("expire", expire _)

    val costDay = sqlContext
      .sql(
        "select " +
          "ID" +
          ",USE_DEPARTMENT_ID as FIRST_DEPARTMENT_ID" +
          ",case when expire(ACCEPTANCE_DATE, DEPRECIABLE_LIVES_NAME) then costingWithin(ORIGINAL_VALUE, DEPRECIABLE_LIVES_NAME) else costingBeyond(ORIGINAL_VALUE) end as ACTUAL_COST" +
          ",ORIGINAL_VALUE" +
          ",current_timestamp() as GENERATION_TIME" +
          " from asset_normal"
      )

    costDay.show(false)
//    costDay.write
//      .format("org.apache.phoenix.spark")
//      .mode("overwrite")
//      .option("table", "ASSET_FINANCIAL_DETAIL_DAY")
//      .option("zkUrl", "node3,node4,node5:2181")
//      .save()
  }
}
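Note that costDay.scala depends on a common.util.timeUtil helper that is not shown in this post. For completeness, a minimal sketch of the two members used above (SECOND_TIME_FORMAT and dateStrAddYears2TimeStamp) might look like the following; the real project version may well differ:

package common.util

import java.text.SimpleDateFormat
import java.util.Calendar

object timeUtil {
  // Format of timestamps such as "2019-07-08 16:38:36" (an assumption).
  final val SECOND_TIME_FORMAT = "yyyy-MM-dd HH:mm:ss"

  // Parse dateStr with the given format, add `years` years,
  // and return the result as epoch milliseconds.
  def dateStrAddYears2TimeStamp(dateStr: String, format: String, years: Int): Long = {
    val calendar = Calendar.getInstance()
    calendar.setTime(new SimpleDateFormat(format).parse(dateStr))
    calendar.add(Calendar.YEAR, years)
    calendar.getTimeInMillis
  }
}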

The program returns "Process finished with exit code 0" — the run succeeded.

Execution output:

C:\3rd\Java\jdk1.8.0_212\bin\java.exe -classpath <project classes plus the full Maven dependency list; omitted> statistics.costDay
// :: INFO spark.SparkContext: Running Spark version
// :: INFO spark.SecurityManager: Changing view acls to: cf_pc
// :: INFO spark.SecurityManager: Changing modify acls to: cf_pc
// :: INFO spark.SecurityManager: SecurityManager: authentication disabled; ui acls disabled; users with view permissions: Set(cf_pc); users with modify permissions: Set(cf_pc)
// :: INFO util.Utils: Successfully started service .
// :: INFO slf4j.Slf4jLogger: Slf4jLogger started
// :: INFO Remoting: Starting remoting
// :: INFO Remoting: Remoting started; listening on addresses :[akka.tcp://sparkDriverActorSystem@10.200.74.156:56580]
// :: INFO Remoting: Remoting now listens on addresses: [akka.tcp://sparkDriverActorSystem@10.200.74.156:56580]
// :: INFO util.Utils: Successfully started service .
// :: INFO spark.SparkEnv: Registering MapOutputTracker
// :: INFO spark.SparkEnv: Registering BlockManagerMaster
// :: INFO storage.DiskBlockManager: Created local directory at C:\Users\cf_pc\AppData\Local\Temp\blockmgr-76b81e6b-b765--a1c0-5078286a7cfc
// :: INFO storage.MemoryStore: MemoryStore started with capacity 478.2 MB
// :: INFO spark.SparkEnv: Registering OutputCommitCoordinator
// :: INFO server.Server: jetty-.y.z-SNAPSHOT
// :: INFO server.AbstractConnector: Started SelectChannelConnector@
// :: INFO util.Utils: Successfully started service .
// :: INFO ui.SparkUI: Started SparkUI at http://10.200.74.156:4040
// :: INFO executor.Executor: Starting executor ID driver on host localhost
// :: INFO util.Utils: Successfully started service .
// :: INFO netty.NettyBlockTransferService: Server created on
// :: INFO storage.BlockManagerMaster: Trying to register BlockManager
// :: INFO storage.BlockManagerMasterEndpoint: Registering block manager localhost: with )
// :: INFO storage.BlockManagerMaster: Registered BlockManager
// :: INFO storage.MemoryStore: Block broadcast_0 stored as values in memory (estimated size 247.5 KB, free 477.9 MB)
// :: INFO storage.MemoryStore: Block broadcast_0_piece0 stored as bytes in memory (estimated size 15.4 KB, free 477.9 MB)
// :: INFO storage.BlockManagerInfo: Added broadcast_0_piece0  (size: 15.4 KB, free: 478.2 MB)
// :: INFO spark.SparkContext: Created broadcast  from newAPIHadoopRDD at PhoenixRDD.scala:
// :: INFO jdbc.PhoenixEmbeddedDriver$ConnectionInfo: Trying to connect to a secure cluster as  with keytab /hbase
// :: INFO jdbc.PhoenixEmbeddedDriver$ConnectionInfo: Successful login to secure cluster
// :: INFO log.QueryLoggerDisruptor: Starting  QueryLoggerDisruptor , waitStrategy=BlockingWaitStrategy, exceptionHandler=org.apache.phoenix.log.QueryLoggerDefaultExceptionHandler@59edb4f5...
// :: INFO query.ConnectionQueryServicesImpl: An instance of ConnectionQueryServices was created.
// :: INFO zookeeper.RecoverableZooKeeper: Process identifier=hconnection-
// :: INFO zookeeper.ZooKeeper: Client environment:zookeeper.version=-, built on // : GMT
// :: INFO zookeeper.ZooKeeper: Client environment:host.name=DESKTOP-0CDQ4PM
// :: INFO zookeeper.ZooKeeper: Client environment:java.version=1.8.0_212
// :: INFO zookeeper.ZooKeeper: Client environment:java.vendor=Oracle Corporation
// :: INFO zookeeper.ZooKeeper: Client environment:java.home=C:\3rd\Java\jdk1.8.0_212\jre
// :: INFO zookeeper.ZooKeeper: Client environment:java.class.path=<same classpath as the launch command above; omitted>
// :: INFO zookeeper.ZooKeeper: Client environment:java.library.path=<omitted>
// :: INFO zookeeper.ZooKeeper: Client environment:java.io.tmpdir=C:\Users\cf_pc\AppData\Local\Temp\
// :: INFO zookeeper.ZooKeeper: Client environment:java.compiler=<NA>
// :: INFO zookeeper.ZooKeeper: Client environment:os.name=Windows
// :: INFO zookeeper.ZooKeeper: Client environment:os.arch=amd64
// :: INFO zookeeper.ZooKeeper: Client environment:os.version=10.0
// :: INFO zookeeper.ZooKeeper: Client environment:user.name=cf_pc
// :: INFO zookeeper.ZooKeeper: Client environment:user.home=C:\Users\cf_pc
// :: INFO zookeeper.ZooKeeper: Client environment:user.dir=C:\development\statistics
// :: INFO zookeeper.ZooKeeper: Initiating client connection, connectString=node3: sessionTimeout= watcher=hconnection-0x73041b7d0x0, quorum=node3:, baseZNode=/hbase
// :: INFO zookeeper.ClientCnxn: Opening socket connection to server node3/. Will not attempt to authenticate using SASL (unknown error)
// :: INFO zookeeper.ClientCnxn: Socket connection established to node3/, initiating session
// :: INFO zookeeper.ClientCnxn: Session establishment complete on server node3/, sessionid =
// :: INFO query.ConnectionQueryServicesImpl: HConnection established. Stacktrace )
org.apache.phoenix.util.LogUtil.getCallerStackTrace(LogUtil.java:)
org.apache.phoenix.query.ConnectionQueryServicesImpl.openConnection(ConnectionQueryServicesImpl.java:)
org.apache.phoenix.query.ConnectionQueryServicesImpl.access$(ConnectionQueryServicesImpl.java:)
org.apache.phoenix.query.ConnectionQueryServicesImpl$.call(ConnectionQueryServicesImpl.java:)
org.apache.phoenix.query.ConnectionQueryServicesImpl$.call(ConnectionQueryServicesImpl.java:)
org.apache.phoenix.util.PhoenixContextExecutor.call(PhoenixContextExecutor.java:)
org.apache.phoenix.query.ConnectionQueryServicesImpl.init(ConnectionQueryServicesImpl.java:)
org.apache.phoenix.jdbc.PhoenixDriver.getConnectionQueryServices(PhoenixDriver.java:)
org.apache.phoenix.jdbc.PhoenixEmbeddedDriver.createConnection(PhoenixEmbeddedDriver.java:)
org.apache.phoenix.jdbc.PhoenixDriver.connect(PhoenixDriver.java:)
java.sql.DriverManager.getConnection(DriverManager.java:)
java.sql.DriverManager.getConnection(DriverManager.java:)
org.apache.phoenix.mapreduce.util.ConnectionUtil.getConnection(ConnectionUtil.java:)
org.apache.phoenix.mapreduce.util.ConnectionUtil.getInputConnection(ConnectionUtil.java:)
org.apache.phoenix.mapreduce.util.PhoenixConfigurationUtil.getSelectColumnMetadataList(PhoenixConfigurationUtil.java:)
org.apache.phoenix.spark.PhoenixRDD.toDataFrame(PhoenixRDD.scala:)
org.apache.phoenix.spark.SparkSqlContextFunctions.phoenixTableAsDataFrame(SparkSqlContextFunctions.scala:)
statistics.benefits.phoenixConnectMode$.getMode1(phoenixConnectMode.scala:)
statistics.costDay$.main(costDay.scala:)
statistics.costDay.main(costDay.scala)

// :: INFO Configuration.deprecation: hadoop.native.lib is deprecated. Instead, use io.native.lib.available
// :: INFO mapreduce.PhoenixInputFormat: UseSelectColumns=, selectColumnList=ID,ASSET_ID,ASSET_NAME,ASSET_FIRST_DEGREE_ID,ASSET_FIRST_DEGREE_NAME,ASSET_SECOND_DEGREE_ID,ASSET_SECOND_DEGREE_NAME,GB_DEGREE_ID,GB_DEGREE_NAME,ASSET_USE_FIRST_DEGREE_ID,ASSET_USE_FIRST_DEGREE_NAME,ASSET_USE_SECOND_DEGREE_ID,ASSET_USE_SECOND_DEGREE_NAME,MANAGEMENT_TYPE_ID,MANAGEMENT_TYPE_NAME,ASSET_MODEL,FACTORY_NUMBER,ASSET_COUNTRY_ID,ASSET_COUNTRY_NAME,MANUFACTURER,SUPPLIER,SUPPLIER_TEL,ORIGINAL_VALUE,USE_DEPARTMENT_ID,USE_DEPARTMENT_NAME,USER_ID,USER_NAME,ASSET_LOCATION_OF_PARK_ID,ASSET_LOCATION_OF_PARK_NAME,ASSET_LOCATION_OF_BUILDING_ID,ASSET_LOCATION_OF_BUILDING_NAME,ASSET_LOCATION_OF_ROOM_ID,ASSET_LOCATION_OF_ROOM_NUMBER,PRODUCTION_DATE,ACCEPTANCE_DATE,REQUISITION_DATE,PERFORMANCE_INDEX,ASSET_STATE_ID,ASSET_STATE_NAME,INSPECTION_TYPE_ID,INSPECTION_TYPE_NAME,SEAL_DATE,SEAL_CAUSE,COST_ITEM_ID,COST_ITEM_NAME,ITEM_COMMENTS,UNSEAL_DATE,SCRAP_DATE,PURCHASE_NUMBER,WARRANTY_PERIOD,DEPRECIABLE_LIVES_ID,DEPRECIABLE_LIVES_NAME,MEASUREMENT_UNITS_ID,MEASUREMENT_UNITS_NAME,ANNEX,REMARK,ACCOUNTING_TYPE_ID,ACCOUNTING_TYPE_NAME,SYSTEM_TYPE_ID,SYSTEM_TYPE_NAME,ASSET_ID_PARENT,CLASSIFIED_LEVEL_ID,CLASSIFIED_LEVEL_NAME,ASSET_PICTURE,MILITARY_SPECIAL_CODE,CHECK_CYCLE_ID,CHECK_CYCLE_NAME,CHECK_DATE,CHECK_EFFECTIVE_DATE,CHECK_MODE_ID,CHECK_MODE_NAME,CHECK_DEPARTMENT_ID,CHECK_DEPARTMENT_NAME,RENT_STATUS_ID,RENT_STATUS_NAME,STORAGE_TIME,UPDATE_USER,UPDATE_TIME,IS_ON_PROCESS,IS_DELETED,FIRST_DEPARTMENT_ID,FIRST_DEPARTMENT_NAME,SECOND_DEPARTMENT_ID,SECOND_DEPARTMENT_NAME,CREATE_USER,CREATE_TIME
// :: INFO Configuration.deprecation: io.bytes.per.checksum is deprecated. Instead, use dfs.bytes-per-checksum
// :: INFO jdbc.PhoenixEmbeddedDriver$ConnectionInfo: Trying to connect to a secure cluster as  with keytab /hbase
// :: INFO jdbc.PhoenixEmbeddedDriver$ConnectionInfo: Successful login to secure cluster
// :: INFO Configuration.deprecation: io.bytes.per.checksum is deprecated. Instead, use dfs.bytes-per-checksum
// :: INFO jdbc.PhoenixEmbeddedDriver$ConnectionInfo: Trying to connect to a secure cluster as  with keytab /hbase
// :: INFO jdbc.PhoenixEmbeddedDriver$ConnectionInfo: Successful login to secure cluster
// :: INFO mapreduce.PhoenixInputFormat: UseSelectColumns=, selectColumnList=ID,ASSET_ID,ASSET_NAME,ASSET_FIRST_DEGREE_ID,ASSET_FIRST_DEGREE_NAME,ASSET_SECOND_DEGREE_ID,ASSET_SECOND_DEGREE_NAME,GB_DEGREE_ID,GB_DEGREE_NAME,ASSET_USE_FIRST_DEGREE_ID,ASSET_USE_FIRST_DEGREE_NAME,ASSET_USE_SECOND_DEGREE_ID,ASSET_USE_SECOND_DEGREE_NAME,MANAGEMENT_TYPE_ID,MANAGEMENT_TYPE_NAME,ASSET_MODEL,FACTORY_NUMBER,ASSET_COUNTRY_ID,ASSET_COUNTRY_NAME,MANUFACTURER,SUPPLIER,SUPPLIER_TEL,ORIGINAL_VALUE,USE_DEPARTMENT_ID,USE_DEPARTMENT_NAME,USER_ID,USER_NAME,ASSET_LOCATION_OF_PARK_ID,ASSET_LOCATION_OF_PARK_NAME,ASSET_LOCATION_OF_BUILDING_ID,ASSET_LOCATION_OF_BUILDING_NAME,ASSET_LOCATION_OF_ROOM_ID,ASSET_LOCATION_OF_ROOM_NUMBER,PRODUCTION_DATE,ACCEPTANCE_DATE,REQUISITION_DATE,PERFORMANCE_INDEX,ASSET_STATE_ID,ASSET_STATE_NAME,INSPECTION_TYPE_ID,INSPECTION_TYPE_NAME,SEAL_DATE,SEAL_CAUSE,COST_ITEM_ID,COST_ITEM_NAME,ITEM_COMMENTS,UNSEAL_DATE,SCRAP_DATE,PURCHASE_NUMBER,WARRANTY_PERIOD,DEPRECIABLE_LIVES_ID,DEPRECIABLE_LIVES_NAME,MEASUREMENT_UNITS_ID,MEASUREMENT_UNITS_NAME,ANNEX,REMARK,ACCOUNTING_TYPE_ID,ACCOUNTING_TYPE_NAME,SYSTEM_TYPE_ID,SYSTEM_TYPE_NAME,ASSET_ID_PARENT,CLASSIFIED_LEVEL_ID,CLASSIFIED_LEVEL_NAME,ASSET_PICTURE,MILITARY_SPECIAL_CODE,CHECK_CYCLE_ID,CHECK_CYCLE_NAME,CHECK_DATE,CHECK_EFFECTIVE_DATE,CHECK_MODE_ID,CHECK_MODE_NAME,CHECK_DEPARTMENT_ID,CHECK_DEPARTMENT_NAME,RENT_STATUS_ID,RENT_STATUS_NAME,STORAGE_TIME,UPDATE_USER,UPDATE_TIME,IS_ON_PROCESS,IS_DELETED,FIRST_DEPARTMENT_ID,FIRST_DEPARTMENT_NAME,SECOND_DEPARTMENT_ID,SECOND_DEPARTMENT_NAME,CREATE_USER,CREATE_TIME
// :: INFO mapreduce.PhoenixInputFormat: Select Statement: SELECT "."CREATE_TIME" FROM ASSET_NORMAL
// :: INFO zookeeper.RecoverableZooKeeper: Process identifier=hconnection-
// :: INFO zookeeper.ZooKeeper: Initiating client connection, connectString=node3: sessionTimeout= watcher=hconnection-0x5f8da820x0, quorum=node3:, baseZNode=/hbase
// :: INFO zookeeper.ClientCnxn: Opening socket connection to server node3/. Will not attempt to authenticate using SASL (unknown error)
// :: INFO zookeeper.ClientCnxn: Socket connection established to node3/, initiating session
// :: INFO zookeeper.ClientCnxn: Session establishment complete on server node3/, sessionid =
// :: INFO util.RegionSizeCalculator: Calculating region sizes for table "IDX_ASSET_NORMAL".
// :: INFO client.ConnectionManager$HConnectionImplementation: Closing master protocol: MasterService
// :: INFO client.ConnectionManager$HConnectionImplementation: Closing zookeeper sessionid=0x36ca2ccfed66851
// :: INFO zookeeper.ZooKeeper: Session: 0x36ca2ccfed66851 closed
// :: INFO zookeeper.ClientCnxn: EventThread shut down
// :: INFO spark.SparkContext: Starting job: show at costDay.scala:
// :: INFO scheduler.DAGScheduler: Got job  (show at costDay.scala:) with  output partitions
// :: INFO scheduler.DAGScheduler: Final stage: ResultStage  (show at costDay.scala:)
// :: INFO scheduler.DAGScheduler: Parents of final stage: List()
// :: INFO scheduler.DAGScheduler: Missing parents: List()
// :: INFO scheduler.DAGScheduler: Submitting ResultStage  (MapPartitionsRDD[] at show at costDay.scala:), which has no missing parents
// :: INFO storage.MemoryStore: Block broadcast_1 stored as values in memory (estimated size 23.6 KB, free 477.9 MB)
// :: INFO storage.MemoryStore: Block broadcast_1_piece0 stored as bytes in memory (estimated size 9.0 KB, free 477.9 MB)
// :: INFO storage.BlockManagerInfo: Added broadcast_1_piece0  (size: 9.0 KB, free: 478.1 MB)
// :: INFO spark.SparkContext: Created broadcast  from broadcast at DAGScheduler.scala:
// :: INFO scheduler.DAGScheduler: Submitting  missing tasks from ResultStage  (MapPartitionsRDD[] at show at costDay.scala:) (first  tasks are ))
// :: INFO scheduler.TaskSchedulerImpl: Adding task set  tasks
// :: INFO scheduler.TaskSetManager: Starting task , localhost, executor driver, partition , ANY,  bytes)
// :: INFO executor.Executor: Running task )
// :: INFO rdd.NewHadoopRDD: Input split: org.apache.phoenix.mapreduce.PhoenixInputSplit@20b488
// :: INFO Configuration.deprecation: io.bytes.per.checksum is deprecated. Instead, use dfs.bytes-per-checksum
// :: INFO jdbc.PhoenixEmbeddedDriver$ConnectionInfo: Trying to connect to a secure cluster as  with keytab /hbase
// :: INFO jdbc.PhoenixEmbeddedDriver$ConnectionInfo: Successful login to secure cluster
// :: INFO codegen.GeneratePredicate: Code generated in 219.2327 ms
// :: INFO codegen.GenerateUnsafeProjection: Code generated in 67.2901 ms
// :: INFO codegen.GenerateSafeProjection: Code generated in 12.9857 ms
// :: INFO executor.Executor: Finished task ).  bytes result sent to driver
// :: INFO scheduler.TaskSetManager: Finished task )  ms on localhost (executor driver) (/)
// :: INFO scheduler.TaskSchedulerImpl: Removed TaskSet 0.0, whose tasks have all completed, from pool
// :: INFO scheduler.DAGScheduler: ResultStage  (show at costDay.scala:) finished in 1.000 s
// :: INFO scheduler.DAGScheduler: Job  finished: show at costDay.scala:, took 1.145145 s
+--------------------------------+-------------------+-----------------------+--------------+-----------------------+
|ID                              |FIRST_DEPARTMENT_ID|ACTUAL_COST            |ORIGINAL_VALUE|GENERATION_TIME        |
+--------------------------------+-------------------+-----------------------+--------------+-----------------------+
|0001d3d568924f89b073ab7fd10b67f7||--- ::36.197|
|0004b28d29124bcc954586b481078bc4||-- ::36.197|
|0004c64dddda4c289dfc4a9ffb8f14d8||-- ::36.197|
|0004f257d4b04337bd7a7f46e00e3c72||-- ::36.197|
|00060d2e73314c3a925b0f4408ff8062||-- ::36.197|
|00064088a53247f8b2435586aad51fb1||-- ::36.197|
|0008cf0ea69f4c46b1853af91da12f43||-- ::36.197|
|000aa24e859e4df29f4c14c1f89c5222||-- ::36.197|
|000b893786444874b2be42f13093ac30||-- ::36.197|
|000bce624409453bbc2e29e688b33939||-- ::36.197|
|000c1624c43147859b4a8cc210a9bc3d||-- ::36.197|
|000cd7f91a8b420fa6af8038cd8d2d38||-- ::36.197|
|000f9ce45af141d581f6be97f925efea||--- ::36.197|
|0010d45656fe44bbabbb07554d57938e||-- ::36.197|
|001139ff8a604c3db5c5bb03dfc0b25e||-- ::36.197|
|00138f888e9e47eeb713fdfa946ad958||-- ::36.197|
|0013f431b4444fd6a5e9c99e9f96402b||-- ::36.197|
|00143021a36246dd8ecb07dbe6ece34d||-- ::36.197|
|00178d016bc4459796a74b9dbf8932af||-- ::36.197|
|001e56309b41437385b6b9165b224c32||-- ::36.197|
+--------------------------------+-------------------+-----------------------+--------------+-----------------------+
only showing top 20 rows

// :: INFO spark.SparkContext: Invoking stop() from shutdown hook
// :: INFO handler.ContextHandler: stopped o.s.j.s.ServletContextHandler{/static/sql,null}
// :: INFO handler.ContextHandler: stopped o.s.j.s.ServletContextHandler{/SQL/execution/json,null}
// :: INFO handler.ContextHandler: stopped o.s.j.s.ServletContextHandler{/SQL/execution,null}
// :: INFO handler.ContextHandler: stopped o.s.j.s.ServletContextHandler{/SQL/json,null}
// :: INFO handler.ContextHandler: stopped o.s.j.s.ServletContextHandler{/SQL,null}
// :: INFO handler.ContextHandler: stopped o.s.j.s.ServletContextHandler{/metrics/json,null}
// :: INFO handler.ContextHandler: stopped o.s.j.s.ServletContextHandler{/stages/stage/kill,null}
// :: INFO handler.ContextHandler: stopped o.s.j.s.ServletContextHandler{/api,null}
// :: INFO handler.ContextHandler: stopped o.s.j.s.ServletContextHandler{/,null}
// :: INFO handler.ContextHandler: stopped o.s.j.s.ServletContextHandler{/static,null}
// :: INFO handler.ContextHandler: stopped o.s.j.s.ServletContextHandler{/executors/threadDump/json,null}
// :: INFO handler.ContextHandler: stopped o.s.j.s.ServletContextHandler{/executors/threadDump,null}
// :: INFO handler.ContextHandler: stopped o.s.j.s.ServletContextHandler{/executors/json,null}
// :: INFO handler.ContextHandler: stopped o.s.j.s.ServletContextHandler{/executors,null}
// :: INFO handler.ContextHandler: stopped o.s.j.s.ServletContextHandler{/environment/json,null}
// :: INFO handler.ContextHandler: stopped o.s.j.s.ServletContextHandler{/environment,null}
// :: INFO handler.ContextHandler: stopped o.s.j.s.ServletContextHandler{/storage/rdd/json,null}
// :: INFO handler.ContextHandler: stopped o.s.j.s.ServletContextHandler{/storage/rdd,null}
// :: INFO handler.ContextHandler: stopped o.s.j.s.ServletContextHandler{/storage/json,null}
// :: INFO handler.ContextHandler: stopped o.s.j.s.ServletContextHandler{/storage,null}
// :: INFO handler.ContextHandler: stopped o.s.j.s.ServletContextHandler{/stages/pool/json,null}
// :: INFO handler.ContextHandler: stopped o.s.j.s.ServletContextHandler{/stages/pool,null}
// :: INFO handler.ContextHandler: stopped o.s.j.s.ServletContextHandler{/stages/stage/json,null}
// :: INFO handler.ContextHandler: stopped o.s.j.s.ServletContextHandler{/stages/stage,null}
// :: INFO handler.ContextHandler: stopped o.s.j.s.ServletContextHandler{/stages/json,null}
// :: INFO handler.ContextHandler: stopped o.s.j.s.ServletContextHandler{/stages,null}
// :: INFO handler.ContextHandler: stopped o.s.j.s.ServletContextHandler{/jobs/job/json,null}
// :: INFO handler.ContextHandler: stopped o.s.j.s.ServletContextHandler{/jobs/job,null}
// :: INFO handler.ContextHandler: stopped o.s.j.s.ServletContextHandler{/jobs/json,null}
// :: INFO handler.ContextHandler: stopped o.s.j.s.ServletContextHandler{/jobs,null}
// :: INFO ui.SparkUI: Stopped Spark web UI at http://10.200.74.156:4040
// :: INFO spark.MapOutputTrackerMasterEndpoint: MapOutputTrackerMasterEndpoint stopped!
// :: INFO storage.MemoryStore: MemoryStore cleared
// :: INFO storage.BlockManager: BlockManager stopped
// :: INFO storage.BlockManagerMaster: BlockManagerMaster stopped
// :: INFO scheduler.OutputCommitCoordinator$OutputCommitCoordinatorEndpoint: OutputCommitCoordinator stopped!
// :: INFO spark.SparkContext: Successfully stopped SparkContext
// :: INFO util.ShutdownHookManager: Shutdown hook called
// :: INFO util.ShutdownHookManager: Deleting directory C:\Users\cf_pc\AppData\Local\Temp\spark-0ab01cf8-9dc3-4ad5-a435-d84098223ee3

Process finished with exit code 0

From this we get the result of the Spark SQL computation: the table printed by costDay.show(false) at the end of the log above.
