Commit ab815341 authored by Jonathan Mace's avatar Jonathan Mace
Browse files

Fix usage of HiveContext

parent 06bd3dae
......@@ -8,6 +8,7 @@ import java.util.List;
import org.apache.commons.lang.StringUtils;
import org.apache.spark.SparkConf;
import org.apache.spark.SparkContext;
import org.apache.spark.sql.hive.HiveContext;
import org.apache.spark.sql.Row;
import org.apache.spark.sql.SQLContext;
import org.apache.spark.sql.hive.HiveContext;
......@@ -33,7 +34,7 @@ public class SparkTPCDSBatchGenerator {
public final TPCDSSettings settings;
public final SparkConf sparkConf;
public final SparkContext sparkContext;
public final SQLContext sqlContext;
public final HiveContext sqlContext;
public final Tables tables;
private SparkTPCDSBatchGenerator(String name, TPCDSSettings settings) {
......
......@@ -30,7 +30,7 @@ public class SparkTPCDSDataGenerator {
public static void generateData(TPCDSSettings settings) {
SparkConf conf = new SparkConf().setAppName("TPC-DS generateData");
SparkContext sc = new SparkContext(conf);
SQLContext sqlContext = new SQLContext(sc);
HiveContext sqlContext = new HiveContext(sc);
Tables tables = new Tables(sqlContext, settings.scaleFactor);
tables.genData(settings.dataLocation, settings.dataFormat, settings.overwrite, settings.partitionTables,
settings.useDoubleForDecimal, settings.clusterByPartitionColumns,
......
......@@ -8,6 +8,7 @@ import org.apache.spark.SparkContext;
import org.apache.spark.sql.Row;
import org.apache.spark.sql.SQLContext;
import org.apache.spark.sql.hive.HiveContext;
import org.apache.spark.sql.hive.HiveContext;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
......@@ -31,7 +32,7 @@ public class SparkTPCDSWorkloadGenerator {
public final TPCDSSettings settings;
public final SparkConf sparkConf;
public final SparkContext sparkContext;
public final SQLContext sqlContext;
public final HiveContext sqlContext;
public final Tables tables;
private SparkTPCDSWorkloadGenerator(String name, TPCDSSettings settings) {
......
......@@ -21,9 +21,10 @@ import org.slf4j.LoggerFactory
import org.apache.spark.sql.functions._
import org.apache.spark.sql.types._
import org.apache.spark.sql.{Row, SQLContext, SaveMode}
import org.apache.spark.sql.hive.HiveContext
import edu.brown.cs.systems.tpcds.Dsdgen
class Tables(sqlContext: SQLContext, scaleFactor: Int) extends Serializable {
class Tables(sqlContext: HiveContext, scaleFactor: Int) extends Serializable {
import sqlContext.implicits._
private val log = LoggerFactory.getLogger(getClass)
......
Markdown is supported
0% or .
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment