/opt/app/spark-1.4.0-bin-hadoop2.3/bin/sparkR ## ... startup output omitted ... Welcome to SparkR! Spark context is available as sc, SQL context is available as sqlContext
> hiveContext <- sparkRHive.init(sc) 15/07/28 17:28:29 INFO hive.HiveContext: Initializing execution hive, version 0.13.1 15/07/28 17:28:29 INFO hive.metastore: Trying to connect to metastore with URI thrift://hadoop4:9083 15/07/28 17:28:30 INFO hive.metastore: Connected to metastore. 15/07/28 17:28:30 INFO session.SessionState: No Tez session required at this point. hive.execution.engine=mr.
# Configure environment variables pointing at the local Spark, Hadoop
# (YARN), and Scala installations before loading SparkR.
Sys.setenv(SPARK_HOME = "/Users/shivaram/spark")
Sys.setenv(YARN_CONF_DIR = "/etc/hadoop/conf")
Sys.setenv(SCALA_HOME = "/opt/app/scala-2.10.5")

# SparkR ships inside the Spark distribution; prepend its library
# directory so library() can find the package.
.libPaths(c(file.path(Sys.getenv("SPARK_HOME"), "R", "lib"), .libPaths()))

library(SparkR)

# Start a Spark context running against YARN in client mode.
sc <- sparkR.init(master = "yarn-client")