Reading hadoopConfiguration from Spark.

Read hadoopConfiguration from SparkContext instead of creating a new Configuration directly from Hadoop config files.
This allows us to use Hadoop parameters inserted or modified in one of Spark's config files (e.g. Swift credentials).
This commit is contained in:
Pace Francesco 2015-06-19 15:01:57 +02:00
parent 3eca8d2947
commit 4f4b08a122

View File

@ -143,7 +143,7 @@ abstract class Dataset(
def checkData(): Unit = {
tablesForTest.foreach { table =>
val fs = FileSystem.get(new java.net.URI(table.outputDir), new Configuration())
val fs = FileSystem.get(new java.net.URI(table.outputDir), sparkContext.hadoopConfiguration)
val exists = fs.exists(new Path(table.outputDir))
val wasSuccessful = fs.exists(new Path(s"${table.outputDir}/_SUCCESS"))
@ -302,4 +302,4 @@ abstract class Dataset(
}
new ExperimentStatus
}
}
}