
Commit d4f4d14

fix failed ut
1 parent 53d91c0 commit d4f4d14

3 files changed: +10, -10 lines


extensions/spark/kyuubi-spark-connector-hive/src/main/scala/org/apache/kyuubi/spark/connector/hive/KyuubiHiveConnectorConf.scala

Lines changed: 4 additions & 4 deletions
@@ -60,13 +60,13 @@ object KyuubiHiveConnectorConf {
 
   val HIVE_FILE_STATUS_CACHE_SCOPE =
     buildConf("spark.sql.kyuubi.hive.file.status.cache.scope")
-      .doc("The scope of hive file status cache, global, session and none.")
+      .doc("The scope of hive file status cache, globe and none.")
       .version("1.11.0")
       .stringConf
       .transform(policy => policy.toUpperCase(Locale.ROOT))
       .checkValue(
-        policy => Set("SESSION", "NONE").contains(policy),
+        policy => Set("GLOBE", "NONE").contains(policy),
         "Invalid value for 'spark.sql.kyuubi.hive.file.status.cache.scope'." +
-          "Valid values are 'SESSION', 'NONE'.")
-      .createWithDefault("SESSION")
+          "Valid values are 'GLOBE', 'NONE'.")
+      .createWithDefault("GLOBE")
 }
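
With this change the config only accepts GLOBE or NONE, since the value is upper-cased by the transform before checkValue runs. A minimal sketch of how a user might set the new scope on a SparkSession; the builder settings below are illustrative and not part of the commit:

    // Sketch only: enable the globally shared file status cache for the
    // Kyuubi Hive connector. "globe" is upper-cased to "GLOBE" by the conf's
    // transform; any value outside GLOBE/NONE now fails checkValue.
    import org.apache.spark.sql.SparkSession

    val spark = SparkSession.builder()
      .master("local[*]") // assumed local setup, for illustration
      .config("spark.sql.kyuubi.hive.file.status.cache.scope", "globe")
      .getOrCreate()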

extensions/spark/kyuubi-spark-connector-hive/src/main/scala/org/apache/kyuubi/spark/connector/hive/read/HiveFileStatusCache.scala

Lines changed: 3 additions & 3 deletions
@@ -37,12 +37,12 @@ import org.apache.kyuubi.spark.connector.hive.KyuubiHiveConnectorConf.HIVE_FILE_
  * Because the original FileStatusCache cannot take effect (see https://github.com/apache/kyuubi
  * /issues/7192).
  *
- * The main modification point is that at the session level, the cache key is the qualified name
+ * The main modification point is that at the globally level, the cache key is the qualified name
  * of the table (in the form of `catalog.database.table`) + path. The previous key was an
  * object + path generated during initialization, and the current scenario is that FileStatusCache
  * is not preserved by the outside, resulting in different keys and ineffective caching.
  *
- * Use [[HiveFileStatusCache.getOrCreate()]] to construct a session/none shared file status cache.
+ * Use [[HiveFileStatusCache.getOrCreate()]] to construct a globe/none shared file status cache.
  */
 object HiveFileStatusCache {
   private var sharedCache: HiveSharedInMemoryCache = _
@@ -61,7 +61,7 @@ object HiveFileStatusCache {
         session.sessionState.conf.metadataCacheTTL)
     }
     conf.getConf(HIVE_FILE_STATUS_CACHE_SCOPE) match {
-      case "SESSION" => sharedCache.createForNewClient(qualifiedName)
+      case "GLOBE" => sharedCache.createForNewClient(qualifiedName)
       case "NONE" => NoopCache
     }
   } else {
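
The updated scaladoc above describes the keying scheme: the shared cache is looked up by the table's qualified name (catalog.database.table) plus path, so it keeps hitting even when the calling objects are recreated. A standalone sketch of that idea, using hypothetical names rather than the connector's actual HiveSharedInMemoryCache:

    // Hypothetical illustration of keying file-status entries by
    // (qualified table name, path) instead of by a per-instance object,
    // which is the stale-key problem described in apache/kyuubi#7192.
    import scala.collection.concurrent.TrieMap
    import org.apache.hadoop.fs.{FileStatus, Path}

    class QualifiedNameFileStatusCache {
      private val cache = TrieMap.empty[(String, Path), Seq[FileStatus]]

      def put(qualifiedName: String, path: Path, files: Seq[FileStatus]): Unit =
        cache.update((qualifiedName, path), files)

      def get(qualifiedName: String, path: Path): Option[Seq[FileStatus]] =
        cache.get((qualifiedName, path))
    }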

extensions/spark/kyuubi-spark-connector-hive/src/test/scala/org/apache/kyuubi/spark/connector/hive/HiveFileStatusCacheSuite.scala

Lines changed: 3 additions & 3 deletions
@@ -33,7 +33,7 @@ import org.apache.kyuubi.spark.connector.hive.read.HiveFileStatusCache
 class HiveFileStatusCacheSuite extends KyuubiHiveTest {
 
   test("use different cache scope") {
-    Seq("SESSION", "NONE").foreach { value =>
+    Seq("GLOBE", "NONE").foreach { value =>
       withSparkSession(Map(HIVE_FILE_STATUS_CACHE_SCOPE.key -> value)) { _ =>
         val path = new Path("/dummy_tmp", "abc")
         val files = (1 to 3).map(_ => new FileStatus())
@@ -44,13 +44,13 @@ class HiveFileStatusCacheSuite extends KyuubiHiveTest {
 
         value match {
           // Exactly 3 files are cached.
-          case "SESSION" =>
+          case "GLOBE" =>
             assert(fileStatusCacheTabel.getLeafFiles(path).get.length === 3)
           case "NONE" =>
             assert(fileStatusCacheTabel.getLeafFiles(path).isEmpty)
           case _ =>
             throw new IllegalArgumentException(
-              s"Unexpected value: '$value'. Only 'SESSION' or 'NONE' are allowed.")
+              s"Unexpected value: '$value'. Only 'GLOBE' or 'NONE' are allowed.")
         }
 
         fileStatusCacheTabel.invalidateAll()
