@@ -12,6 +12,7 @@ import org.apache.log4j.Logger
1212import scala .collection .JavaConverters ._
1313import com .typesafe .config .{Config , ConfigFactory }
1414import com .vesoft .nebula .algorithm .config .Configs .readConfig
15+ import com .vesoft .nebula .algorithm .config .Configs .getOrElse
1516
1617import scala .collection .mutable
1718
@@ -129,6 +130,51 @@ object LocalConfigEntry {
129130 }
130131}
131132
133+
object HiveConfigEntry {

  /**
   * Build the hive configuration from the typesafe config.
   */
  def apply(config: Config): HiveConfigEntry = {
    // uri of hive metastore. eg: thrift://127.0.0.1:9083
    val metaStoreUris: String = getOrElse(config, "hive.metaStoreUris", "")
    HiveConfigEntry(metaStoreUris, buildReadConfig(config), buildWriteConfig(config))
  }

  /**
   * Read-side settings: the spark sql and the column names its result is mapped with.
   */
  def buildReadConfig(config: Config): HiveReadConfigEntry = {
    // source data of spark sql
    val sql: String = getOrElse(config, "hive.read.sql", "")
    // the source vertex ID is mapped with the SQL result column name
    val srcIdCol: String = getOrElse(config, "hive.read.srcId", "")
    // the dest vertex ID is mapped with the SQL result column name
    val dstIdCol: String = getOrElse(config, "hive.read.dstId", "")
    // the weight is mapped with the SQL result column name
    val weightCol: String = getOrElse(config, "hive.read.weight", "")
    HiveReadConfigEntry(sql, srcIdCol, dstIdCol, weightCol)
  }

  /**
   * Write-side settings: target table, spark save mode, table auto-creation
   * and the result-column to table-column mapping.
   */
  def buildWriteConfig(config: Config): HiveWriteConfigEntry = {
    // algo result save to hive table
    val dbTableName: String = getOrElse(config, "hive.write.dbTableName", "")
    // save mode of spark
    val saveMode: String = getOrElse(config, "hive.write.saveMode", "")
    // whether the table is automatically created
    val autoCreateTable: Boolean = getOrElse(config, "hive.write.autoCreateTable", true)
    // algo results dataframe column and hive table column mapping relationships
    val mappingKey    = "hive.write.resultTableColumnMapping"
    val columnMapping = mutable.Map.empty[String, String]
    if (config.hasPath(mappingKey)) {
      val subKeys = config.getObject(mappingKey).unwrapped().keySet().asScala
      subKeys.foreach { subkey =>
        columnMapping += subkey -> config.getString(s"$mappingKey.$subkey")
      }
    }
    HiveWriteConfigEntry(dbTableName, saveMode, autoCreateTable, columnMapping)
  }
}
177+
132178/**
133179 * SparkConfigEntry support key-value pairs for spark session.
134180 *
@@ -173,6 +219,34 @@ case class LocalConfigEntry(filePath: String,
173219 }
174220}
175221
/**
 * Top-level hive config: metastore uri plus the read and write entries.
 *
 * @param hiveMetaStoreUris    uri of the hive metastore, eg: thrift://127.0.0.1:9083
 * @param hiveReadConfigEntry  read-side (spark sql) settings
 * @param hiveWriteConfigEntry write-side (result table) settings
 */
case class HiveConfigEntry(hiveMetaStoreUris: String,
                           hiveReadConfigEntry: HiveReadConfigEntry,
                           hiveWriteConfigEntry: HiveWriteConfigEntry) {
  override def toString: String =
    s"HiveConfigEntry: {hiveMetaStoreUris: $hiveMetaStoreUris, " +
      s"read: $hiveReadConfigEntry, write: $hiveWriteConfigEntry}"
}
229+
/**
 * Settings for reading source data via spark sql.
 *
 * @param sql       source data query of spark sql
 * @param srcIdCol  SQL result column mapped to the source vertex ID
 * @param dstIdCol  SQL result column mapped to the dest vertex ID
 * @param weightCol SQL result column mapped to the weight
 */
case class HiveReadConfigEntry(sql: String,
                               srcIdCol: String = "srcId",
                               dstIdCol: String = "dstId",
                               weightCol: String) {
  override def toString: String = {
    val fields = Seq(
      s"sql: $sql",
      s"srcIdCol: $srcIdCol",
      s"dstIdCol: $dstIdCol",
      s"weightCol: $weightCol"
    )
    fields.mkString("HiveReadConfigEntry: {", ", ", "}")
  }
}
239+
/**
 * Settings for saving the algorithm result to a hive table.
 *
 * @param dbTableName         hive table the algorithm result is saved to
 * @param saveMode            save mode of spark used when writing
 * @param autoCreateTable     whether the table is automatically created
 * @param resultColumnMapping result dataframe column to hive table column mapping
 */
case class HiveWriteConfigEntry(dbTableName: String,
                                saveMode: String,
                                autoCreateTable: Boolean,
                                resultColumnMapping: mutable.Map[String, String]) {
  // Consistency fix: use "key: value" separators throughout, matching the other
  // config entries' toString; the original mixed ':' and '=' in one message.
  override def toString: String = {
    s"HiveWriteConfigEntry: {dbTableName: $dbTableName, saveMode: $saveMode, " +
      s"autoCreateTable: $autoCreateTable, resultColumnMapping: $resultColumnMapping}"
  }
}
249+
176250/**
177251 * NebulaConfigEntry
178252 * @param readConfigEntry config for nebula-spark-connector reader
@@ -218,6 +292,7 @@ case class Configs(sparkConfig: SparkConfigEntry,
218292 dataSourceSinkEntry : DataSourceSinkEntry ,
219293 nebulaConfig : NebulaConfigEntry ,
220294 localConfigEntry : LocalConfigEntry ,
295+ hiveConfigEntry : HiveConfigEntry ,
221296 algorithmConfig : AlgorithmConfigEntry )
222297
223298object Configs {
@@ -237,10 +312,11 @@ object Configs {
237312 val dataSourceEntry = DataSourceSinkEntry (config)
238313 val localConfigEntry = LocalConfigEntry (config)
239314 val nebulaConfigEntry = NebulaConfigEntry (config)
240- val sparkEntry = SparkConfigEntry (config)
241- val algorithmEntry = AlgorithmConfigEntry (config)
315+ val hiveConfigEntry = HiveConfigEntry (config)
316+ val sparkEntry = SparkConfigEntry (config)
317+ val algorithmEntry = AlgorithmConfigEntry (config)
242318
243- Configs (sparkEntry, dataSourceEntry, nebulaConfigEntry, localConfigEntry, algorithmEntry)
319+ Configs (sparkEntry, dataSourceEntry, nebulaConfigEntry, localConfigEntry, hiveConfigEntry, algorithmEntry)
244320 }
245321
246322 /**
@@ -277,15 +353,15 @@ object Configs {
277353 }
278354
279355 /**
280- * Get the value from config by the path. If the path not exist, return the default value.
281- *
282- * @param config The config.
283- * @param path The path of the config.
284- * @param defaultValue The default value for the path.
285- *
286- * @return
287- */
288- private [ this ] def getOrElse [T ](config : Config , path : String , defaultValue : T ): T = {
356+  * Get the value from config by the path. If the path does not exist, return the default value.
357+ *
358+ * @param config The config.
359+ * @param path The path of the config.
360+ * @param defaultValue The default value for the path.
361+ *
362+ * @return
363+ */
364+ def getOrElse [T ](config : Config , path : String , defaultValue : T ): T = {
289365 if (config.hasPath(path)) {
290366 config.getAnyRef(path).asInstanceOf [T ]
291367 } else {
0 commit comments