For now, SQL-based index creation will still be limited to covering indexes on a LogicalRelation only, even though Spark SQL can produce a variety of plans (e.g. Filter, Join, etc.). Indexes on arbitrary plans will be addressed later.
// createIndex API
/**
 * Creates a covering index over the relation produced by running `sourceSql`.
 *
 * The supplied SQL is executed to resolve the source DataFrame, and the SQL
 * text itself is stored on the config (via `copy`) so it is preserved in the
 * IndexLogEntry for later refresh/rebuild.
 *
 * @param sourceSql   SQL statement identifying the data to index
 * @param indexConfig index name and column specification
 */
def createIndex(sourceSql: String, indexConfig: IndexConfig): Unit = {
  // Resolve the source plan from the user-supplied SQL.
  // (Was `indexConfig.generateSql`, which does not exist and ignored the parameter.)
  val df = spark.sql(sourceSql)
  // Keep the original SQL in the config so it lands in the IndexLogEntry.
  // (Was `sql`, an undefined name; the parameter is `sourceSql`.)
  indexManager.create(df, indexConfig.copy(sourceSql = sourceSql))
}
hs.createIndex(sourceSql, IndexConfig(..))
// new field in IndexConfig (to keep the sql in IndexLogEntry)
/**
 * Configuration for a covering index.
 *
 * @param indexName       name of the index to create
 * @param indexedColumns  columns the index is keyed/sorted on
 * @param includedColumns additional columns stored in the index (default: none)
 * @param sourceSql       SQL used to build the source DataFrame; persisted in the
 *                        IndexLogEntry. Defaults to "" for non-SQL creation paths.
 */
case class IndexConfig(
indexName: String,
indexedColumns: Seq[String],
includedColumns: Seq[String] = Seq(),
sourceSql: String = "")
// example
// Register the parquet data as a temp view so the SQL below can reference it.
// NOTE: `createTempView` returns Unit, so binding its result to a val
// (as the earlier draft did) is misleading — the binding is dropped.
spark.read.parquet("table2").createTempView("table2")
hs.createIndex("select * from table2", IndexConfig("indexName", Seq("id"), Seq("name")))