
Commit 4880ead

Author: Sumedh Wale (committed)
Merge remote-tracking branch 'origin/master' into SNAP-2366
2 parents 27e1e29 + 9299a80, commit 4880ead

105 files changed (+2104 / -1986 lines)


NOTICE

Lines changed: 292 additions & 296 deletions
Large diffs are not rendered by default.

build.gradle

Lines changed: 265 additions & 210 deletions
Large diffs are not rendered by default.

cluster/build.gradle

Lines changed: 3 additions & 2 deletions
@@ -128,10 +128,11 @@ dependencies {
     exclude(group: 'com.sun.jersey.contribs')
     exclude(group: 'com.google.protobuf', module: 'protobuf-java')
     exclude(group: 'com.jcraft', module: 'jsch')
+    exclude(group: 'org.apache.directory.server', module: 'apacheds-kerberos-codec')
   }

   testCompile project(':dunit')
-  testCompile 'it.unimi.dsi:fastutil:8.2.2'
+  testCompile "it.unimi.dsi:fastutil:${fastutilVersion}"
   testCompile "org.scalatest:scalatest_${scalaBinaryVersion}:${scalatestVersion}"

   if (new File(rootDir, 'aqp/build.gradle').exists() && rootProject.hasProperty('snappydata.enterprise')) {
@@ -143,7 +144,7 @@ dependencies {

 // Creates the version properties file and writes it to the resources dir
 task createVersionPropertiesFile(dependsOn: 'processResources') {
-  def propertiesDir = file("${buildDir}/classes/main/io/snappydata")
+  def propertiesDir = file("${sourceSets.main.scala.outputDir}/io/snappydata")
   outputs.file "${propertiesDir}/SnappyDataVersion.properties"
   inputs.file "${rootProject.projectDir}/build.gradle"

cluster/src/dunit/scala/io/snappydata/cluster/DDLRoutingDUnitTest.scala

Lines changed: 49 additions & 14 deletions
@@ -19,10 +19,12 @@ package io.snappydata.cluster
 import java.sql.{Connection, DriverManager, SQLException}

 import com.pivotal.gemfirexd.internal.engine.{GfxdConstants, Misc}
+import io.snappydata.SnappyFunSuite.resultSetToDataset
 import io.snappydata.test.dunit.{AvailablePortHelper, SerializableRunnable}

-import org.apache.spark.sql.SnappyContext
 import org.apache.spark.sql.collection.Utils
+import org.apache.spark.sql.store.ViewTest
+import org.apache.spark.sql.{Dataset, Row, SnappyContext, SnappySession}

 class DDLRoutingDUnitTest(val s: String) extends ClusterManagerTestBase(s) {

@@ -72,12 +74,12 @@ class DDLRoutingDUnitTest(val s: String) extends ClusterManagerTestBase(s) {
     val conn = getANetConnection(netPort1)

     // first fail a statement
-    failCreateTableXD(conn, tableName, true, " row ")
+    failCreateTableXD(conn, tableName, doFail = true, " row ")

     createTableXD(conn, tableName, " row ")
     tableMetadataAssertRowTable("APP", tableName)
     // Test create table - error for recreate
-    failCreateTableXD(conn, tableName, false, " row ")
+    failCreateTableXD(conn, tableName, doFail = false, " row ")

     // Drop Table and Recreate
     dropTableXD(conn, tableName)
@@ -167,7 +169,7 @@ class DDLRoutingDUnitTest(val s: String) extends ClusterManagerTestBase(s) {

     vm2.invoke(classOf[ClusterManagerTestBase], "stopAny")
     val props = bootProps.clone().asInstanceOf[java.util.Properties]
-    props.put("distributed-system-id" , "1")
+    props.put("distributed-system-id", "1")
     props.put("server-groups", "sg1")

     val restartServer = new SerializableRunnable() {
@@ -185,7 +187,7 @@ class DDLRoutingDUnitTest(val s: String) extends ClusterManagerTestBase(s) {
     var s = conn.createStatement()
     s.execute(s"CREATE TABLE $tableName (Col1 INT, Col2 INT, Col3 STRING)")
     insertDataXD(conn, tableName)
-    var snc = org.apache.spark.sql.SnappyContext(sc)
+    val snc = org.apache.spark.sql.SnappyContext(sc)
     verifyResultAndSchema(snc, tableName, 3)

     s.execute(s"ALTER TABLE $tableName ADD Col4 INT")
@@ -207,21 +209,21 @@ class DDLRoutingDUnitTest(val s: String) extends ClusterManagerTestBase(s) {
     s.execute(s"insert into $tableName values (1,1)")
     s.execute(s"ALTER TABLE $tableName add constraint emp_uk unique (Col1)")
     try {
-        s.execute(s"insert into $tableName values (1,1)")
+      s.execute(s"insert into $tableName values (1,1)")
     } catch {
       case sqle: SQLException =>
         if (sqle.getSQLState != "23505" ||
-          !sqle.getMessage.contains("duplicate key value in a unique or" +
-            " primary key constraint or unique index")) {
+            !sqle.getMessage.contains("duplicate key value in a unique or" +
+                " primary key constraint or unique index")) {
           throw sqle
         }
     }

     // asynceventlistener
     s.execute("CREATE ASYNCEVENTLISTENER myListener (" +
-      " listenerclass 'com.pivotal.gemfirexd.callbacks.DBSynchronizer'" +
-      " initparams 'org.apache.derby.jdbc.EmbeddedDriver,jdbc:derby:newDB;create=true')" +
-      " server groups(sg1)")
+        " listenerclass 'com.pivotal.gemfirexd.callbacks.DBSynchronizer'" +
+        " initparams 'org.apache.derby.jdbc.EmbeddedDriver,jdbc:derby:newDB;create=true')" +
+        " server groups(sg1)")

     s.execute(s"ALTER TABLE $tableName SET ASYNCEVENTLISTENER (myListener) ")
     var rs = s.executeQuery(s"select * from SYS.SYSTABLES where tablename='$tableName'")
@@ -287,7 +289,8 @@ class DDLRoutingDUnitTest(val s: String) extends ClusterManagerTestBase(s) {
     var cnt = 0
     while (rs.next()) {
       cnt += 1
-      rs.getInt(1); rs.getInt(2);
+      rs.getInt(1)
+      rs.getInt(2)
     }
     assert(cnt == 5, cnt)

@@ -296,7 +299,9 @@ class DDLRoutingDUnitTest(val s: String) extends ClusterManagerTestBase(s) {
     cnt = 0
     while (rs2.next()) {
       cnt += 1
-      rs2.getInt(1); rs2.getInt(2); rs2.getInt(3);
+      rs2.getInt(1)
+      rs2.getInt(2)
+      rs2.getInt(3)
     }
     assert(cnt == 5, cnt)

@@ -324,6 +329,36 @@ class DDLRoutingDUnitTest(val s: String) extends ClusterManagerTestBase(s) {
     dropTableXD(conn, tableName)
   }

+  def testViews(): Unit = {
+    val netPort1 = AvailablePortHelper.getRandomAvailableTCPPort
+    vm2.invoke(classOf[ClusterManagerTestBase], "startNetServer", netPort1)
+
+    val session = new SnappySession(sc)
+    ViewTest.createTables(session)
+
+    def newExecution(): String => Dataset[Row] = {
+      val session = new SnappySession(sc)
+      val conn = getANetConnection(netPort1)
+      val stmt = conn.createStatement()
+      resultSetToDataset(session, stmt)
+    }
+
+    val conn = getANetConnection(netPort1)
+    val stmt = conn.createStatement()
+    ViewTest.testTemporaryView(resultSetToDataset(session, stmt), newExecution)
+    ViewTest.testGlobalTemporaryView(resultSetToDataset(session, stmt), newExecution)
+    ViewTest.testTemporaryViewUsing(resultSetToDataset(session, stmt), newExecution)
+    ViewTest.testGlobalTemporaryViewUsing(resultSetToDataset(session, stmt), newExecution)
+    ViewTest.testPersistentView(resultSetToDataset(session, stmt), checkPlans = false,
+      newExecution, restartSpark)
+    ViewTest.dropTables(new SnappySession(sc))
+  }
+
+  private def restartSpark(): Unit = {
+    ClusterManagerTestBase.stopAny()
+    ClusterManagerTestBase.startSnappyLead(ClusterManagerTestBase.locatorPort, bootProps)
+  }
+
   def createTableXD(conn: Connection, tableName: String,
       usingStr: String): Unit = {
     val s = conn.createStatement()
@@ -421,7 +456,7 @@ class DDLRoutingDUnitTest(val s: String) extends ClusterManagerTestBase(s) {
       s.execute("CREATE EXTERNAL TABLE airlineRef_temp(Code VARCHAR(25), " +
         "Description VARCHAR(25)) USING parquet OPTIONS()")
     } catch {
-      case e: java.sql.SQLException =>
+      case _: java.sql.SQLException =>
       // println("Exception stack. create. ex=" + e.getMessage +
       // " ,stack=" + ExceptionUtils.getFullStackTrace(e))
     }
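
Note on the new testViews above: it reuses the shared ViewTest checks from org.apache.spark.sql.store and drives them both through an embedded SnappySession and through a JDBC connection to the net server. The bridge is SnappyFunSuite.resultSetToDataset, which adapts a JDBC Statement into a String => Dataset[Row] executor. A minimal sketch of such an adapter follows, assuming every column is read back as a string; the real helper in SnappyFunSuite is not part of this diff and presumably preserves the original column types.

import java.sql.Statement

import scala.collection.JavaConverters._
import scala.collection.mutable.ArrayBuffer

import org.apache.spark.sql.types.{StringType, StructField, StructType}
import org.apache.spark.sql.{Dataset, Row, SnappySession}

object ResultSetToDatasetSketch {

  // Hypothetical adapter, not the SnappyFunSuite implementation: executes the
  // given SQL over JDBC and repackages the ResultSet as a Dataset[Row].
  def resultSetToDataset(session: SnappySession, stmt: Statement): String => Dataset[Row] = { sql =>
    val rs = stmt.executeQuery(sql)
    val md = rs.getMetaData
    val numCols = md.getColumnCount
    // Simplification: every column is mapped to StringType.
    val schema = StructType((1 to numCols).map(i =>
      StructField(md.getColumnName(i), StringType)))
    val rows = ArrayBuffer.empty[Row]
    while (rs.next()) {
      rows += Row.fromSeq((1 to numCols).map(i => rs.getString(i)))
    }
    // createDataFrame(java.util.List[Row], StructType) yields a Dataset[Row].
    session.createDataFrame(rows.asJava, schema)
  }
}

This shape matches how testViews uses the helper: each call to newExecution() opens a fresh session and connection, so the same view assertions can be replayed against a new execution context.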

cluster/src/dunit/scala/io/snappydata/cluster/DistributedIndexDUnitTest.scala

Lines changed: 3 additions & 3 deletions
@@ -114,7 +114,7 @@ class DistributedIndexDUnitTest(s: String) extends ClusterManagerTestBase(s) {
     // }

     // executeQ(s"select * from $tableName where col2 = 'aaa' ") {
-    //   CreateIndexTest.validateIndex(Seq.empty, tableName)(_)
+    //   CreateIndexTest.validateIndex(Nil, tableName)(_)
     // }

     executeQ(s"select * from $tableName where col2 = 'bbb' and col3 = 'halo' ") {
@@ -164,7 +164,7 @@ class DistributedIndexDUnitTest(s: String) extends ClusterManagerTestBase(s) {
     // }

     // executeQ(s"select * from $tableName where col2 = 'aaa' ") {
-    //   CreateIndexTest.validateIndex(Seq.empty, tableName)(_)
+    //   CreateIndexTest.validateIndex(Nil, tableName)(_)
     // }

     System.setProperty("LOG-NOW", "xxx")
@@ -228,7 +228,7 @@ class DistributedIndexDUnitTest(s: String) extends ClusterManagerTestBase(s) {
     // }

     // executeQ(s"select * from $tableName where col2 = 'aaa' ") {
-    //   CreateIndexTest.validateIndex(Seq.empty, tableName)(_)
+    //   CreateIndexTest.validateIndex(Nil, tableName)(_)
     // }

     System.setProperty("LOG-NOW", "xxx")

cluster/src/dunit/scala/io/snappydata/cluster/QueryRoutingDUnitTest.scala

Lines changed: 1 addition & 0 deletions
@@ -761,6 +761,7 @@ class QueryRoutingDUnitTest(val s: String)

     TPCHUtils.createAndLoadTables(snc, true)

+    snc.setConf(Property.EnableExperimentalFeatures.name, "true")
     snc.sql(
       s"""CREATE INDEX idx_orders_cust ON orders(o_custkey)
         options (COLOCATE_WITH 'customer')

cluster/src/dunit/scala/io/snappydata/cluster/SplitSnappyClusterDUnitTest.scala

Lines changed: 1 addition & 1 deletion
@@ -435,7 +435,7 @@ object SplitSnappyClusterDUnitTest
     snc.dropTable("splitModeTable1", ifExists = true)

     // recreate the dropped table
-    var expected = Seq.empty[ComplexData]
+    var expected: Seq[ComplexData] = Nil
     if (isComplex) {
       expected = createComplexTableUsingDataSourceAPI(snc, "splitModeTable1",
         tableType, props)
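
The Seq.empty to Nil replacements that recur across this merge (here and in the DUnit tests below) are behavior-neutral, but for a var the explicit Seq[ComplexData] annotation matters: without it the compiler would infer List[Nothing] from Nil and the later reassignment would not compile. A self-contained sketch with a stand-in case class (illustrative only, not code from this commit):

object NilVsSeqEmptySketch {

  // Stand-in for the test's ComplexData rows; hypothetical, not from this commit.
  case class ComplexData(id: Int)

  def main(args: Array[String]): Unit = {
    // The annotation keeps the var typed as Seq[ComplexData]; Nil alone would
    // infer List[Nothing] and the reassignment below would not compile.
    var expected: Seq[ComplexData] = Nil
    expected = Seq(ComplexData(1), ComplexData(2))
    assert(expected.size == 2)
    // Nil and Seq.empty denote the same empty sequence value.
    assert(Nil == Seq.empty)
  }
}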

cluster/src/dunit/scala/org/apache/spark/DynamicJarInstallationDUnitTest.scala

Lines changed: 3 additions & 3 deletions
@@ -67,7 +67,7 @@ class DynamicJarInstallationDUnitTest(val s: String)
     var testJar = DynamicJarInstallationDUnitTest.createJarWithClasses(
       classNames = Seq("FakeJobClass", "FakeJobClass1"),
       toStringValue = "1",
-      Seq.empty, Seq.empty,
+      Nil, Nil,
       "testJar_SNAPPY_JOB_SERVER_JAR_%s.jar".format(System.currentTimeMillis()))

     var jobCompleted = false
@@ -106,7 +106,7 @@ class DynamicJarInstallationDUnitTest(val s: String)
     testJar = DynamicJarInstallationDUnitTest.createJarWithClasses(
       classNames = Seq("FakeJobClass", "FakeJobClass1"),
       toStringValue = "2",
-      Seq.empty, Seq.empty,
+      Nil, Nil,
       "testJar_SNAPPY_JOB_SERVER_JAR_%s.jar".format(System.currentTimeMillis()))

     localProperty = (Seq("app1", DateTime.now) ++ Array[URL](testJar)).mkString(",")
@@ -169,4 +169,4 @@ object DynamicJarInstallationDUnitTest {
     else false
   }
 }
-}
+}

cluster/src/dunit/scala/org/apache/spark/sql/udf/UserDefinedFunctionsDUnitTest.scala

Lines changed: 1 addition & 1 deletion
@@ -279,7 +279,7 @@ object UserDefinedFunctionsDUnitTest {

   def createUDFClass(name: String, code: String): File = {
     TestUtils.createCompiledClass(name, destDir,
-      getJavaSourceFromString(name, code), Seq.empty[URL])
+      getJavaSourceFromString(name, code), Nil)
   }

   def createJarFile(files: Seq[File]): String = {

cluster/src/main/java/io/snappydata/gemxd/SnappySystemAdmin.java

Lines changed: 12 additions & 16 deletions
@@ -32,7 +32,7 @@

 public class SnappySystemAdmin extends GfxdSystemAdmin {

-  SnappySystemAdmin() {
+  private SnappySystemAdmin() {
     super();
     UTIL_Tools_DSProps = "UTIL_Snappy_Tools_DSProps";
     UTIL_DSProps_HelpPost = "UTIL_Snappy_Tools_DSProps_HelpPost";
@@ -91,31 +91,28 @@ public void invoke(String[] args) {

       super.invoke(args);
     } finally {
-      // remove zero-sized log-file
-      if (this.defaultLogFileName != null) {
-        try {
-          File logFile = new File(this.defaultLogFileName);
-          if (logFile.exists() && logFile.isFile() && logFile.length() == 0) {
-            logFile.delete();
-          }
-        } catch (Throwable t) {
-          // ignore at this point
+      // remove zero-sized generatedcode.log file
+      try {
+        File codeLogFile = new File("generatedcode.log");
+        if (codeLogFile.exists() && codeLogFile.isFile() && codeLogFile.length() == 0) {
+          codeLogFile.delete();
         }
+      } catch (Throwable t) {
+        // ignore at this point
       }
     }
   }

   public boolean handleVersion(String[] args) {
-    String cmd = null;
-    final ArrayList<String> cmdLine = new ArrayList<String>(Arrays.asList(args));
+    String cmd;
+    final ArrayList<String> cmdLine = new ArrayList<>(Arrays.asList(args));
     try {
       Iterator<String> it = cmdLine.iterator();
       while (it.hasNext()) {
         String arg = it.next();
         if (arg.startsWith("-")) {
           checkDashArg(null, arg, it);
-        }
-        else {
+        } else {
           break;
         }
       }
@@ -159,9 +156,8 @@ public boolean handleVersion(String[] args) {
     }

     if (cmd.equalsIgnoreCase("version")) {
-      Boolean optionOK = (cmdLine.size() == 0);
+      boolean optionOK = (cmdLine.size() == 0);
       if (cmdLine.size() == 1) {
-        optionOK = false;
         String option = cmdLine.get(0);
         if ("CREATE".equals(option) || "FULL".equalsIgnoreCase(option)) {
           optionOK = true;
