[2017-11-09 10:17:55,399] [main] [INFO] Parsing command: DELETE FROM TABLE testCarbonOverwrite WHERE SEGMENT.STARTTIME BEFORE '2017-11-09 10:17:54' [Logging.scala]
[2017-11-09 10:17:55,424] [main] [INFO] main Skip CarbonOptimizer [StandardLogService.java]
[2017-11-09 10:17:55,425] [main] [INFO] 0: get_table : db=default tbl=testcarbonoverwrite [HiveMetaStore.java]
[2017-11-09 10:17:55,425] [main] [INFO] ugi=root ip=unknown-ip-addr cmd=get_table : db=default tbl=testcarbonoverwrite [HiveMetaStore.java]
[2017-11-09 10:17:55,434] [main] [INFO] Parsing command: array [Logging.scala]
[2017-11-09 10:17:55,436] [main] [INFO] 0: get_table : db=default tbl=testcarbonoverwrite [HiveMetaStore.java]
[2017-11-09 10:17:55,437] [main] [INFO] ugi=root ip=unknown-ip-addr cmd=get_table : db=default tbl=testcarbonoverwrite [HiveMetaStore.java]
[2017-11-09 10:17:55,444] [main] [INFO] Parsing command: array [Logging.scala]
[2017-11-09 10:17:55,447] [main] [AUDIT] [slave1][root][Thread-1]Delete segment by Id request has been received for default.testcarbonoverwrite [StandardLogService.java]
[2017-11-09 10:17:55,484] [main] [INFO] main Delete segment lock has been successfully acquired [StandardLogService.java]
[2017-11-09 10:17:55,485] [main] [AUDIT] [slave1][root][Thread-1]Delete segment by date is failed. No matching segment found. [StandardLogService.java]
[2017-11-09 10:17:55,486] [main] [INFO] main Table status lock has been successfully released [StandardLogService.java]
[2017-11-09 10:17:55,486] [main] [INFO] main Successfully deleted the lock file /home/xu/local_carbonstore/default/testcarbonoverwrite/delete_segment.lock [StandardLogService.java]
[2017-11-09 10:17:55,486] [main] [INFO] main Delete segments lock has been successfully released [StandardLogService.java]
Exception in thread "main" java.lang.RuntimeException: Delete segment by date is failed. No matching segment found.
at scala.sys.package$.error(package.scala:27)
at org.apache.carbondata.api.CarbonStore$.deleteLoadByDate(CarbonStore.scala:152)
at org.apache.spark.sql.execution.command.management.DeleteLoadByLoadDateCommand.processData(DeleteLoadByLoadDateCommand.scala:42)
at org.apache.spark.sql.execution.command.management.DeleteLoadByLoadDateCommand.run(DeleteLoadByLoadDateCommand.scala:34)
at org.apache.spark.sql.execution.command.ExecutedCommandExec.sideEffectResult$lzycompute(commands.scala:58)
at org.apache.spark.sql.execution.command.ExecutedCommandExec.sideEffectResult(commands.scala:56)
at org.apache.spark.sql.execution.command.ExecutedCommandExec.doExecute(commands.scala:74)
at org.apache.spark.sql.execution.SparkPlan$$anonfun$execute$1.apply(SparkPlan.scala:114)
at org.apache.spark.sql.execution.SparkPlan$$anonfun$execute$1.apply(SparkPlan.scala:114)
at org.apache.spark.sql.execution.SparkPlan$$anonfun$executeQuery$1.apply(SparkPlan.scala:135)
at org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:151)
at org.apache.spark.sql.execution.SparkPlan.executeQuery(SparkPlan.scala:132)
at org.apache.spark.sql.execution.SparkPlan.execute(SparkPlan.scala:113)
at org.apache.spark.sql.execution.QueryExecution.toRdd$lzycompute(QueryExecution.scala:87)
at org.apache.spark.sql.execution.QueryExecution.toRdd(QueryExecution.scala:87)
at org.apache.spark.sql.Dataset.<init>(Dataset.scala:185)
at org.apache.spark.sql.Dataset$.ofRows(Dataset.scala:64)
at org.apache.spark.sql.SparkSession.sql(SparkSession.scala:592)
at chuanyin.xu.updb.process.util.SparkEngine.sql(SparkEngine.java:69)
at chuanyin.xu.updb.process.useless.CarbonOverwriteTest.writeDf2Table(CarbonOverwriteTest.java:90)
at chuanyin.xu.updb.process.useless.CarbonOverwriteTest.main(CarbonOverwriteTest.java:102)
[2017-11-09 10:17:55,399] [main] [INFO] Parsing command: DELETE FROM TABLE testCarbonOverwrite WHERE SEGMENT.STARTTIME BEFORE '2017-11-09 10:17:54' [Logging.scala]
[2017-11-09 10:17:55,424] [main] [INFO] main Skip CarbonOptimizer [StandardLogService.java]
[2017-11-09 10:17:55,425] [main] [INFO] 0: get_table : db=default tbl=testcarbonoverwrite [HiveMetaStore.java]
[2017-11-09 10:17:55,425] [main] [INFO] ugi=root ip=unknown-ip-addr cmd=get_table : db=default tbl=testcarbonoverwrite [HiveMetaStore.java]
[2017-11-09 10:17:55,434] [main] [INFO] Parsing command: array [Logging.scala]
[2017-11-09 10:17:55,436] [main] [INFO] 0: get_table : db=default tbl=testcarbonoverwrite [HiveMetaStore.java]
[2017-11-09 10:17:55,437] [main] [INFO] ugi=root ip=unknown-ip-addr cmd=get_table : db=default tbl=testcarbonoverwrite [HiveMetaStore.java]
[2017-11-09 10:17:55,444] [main] [INFO] Parsing command: array [Logging.scala]
[2017-11-09 10:17:55,447] [main] [AUDIT] [slave1][root][Thread-1]Delete segment by Id request has been received for default.testcarbonoverwrite [StandardLogService.java]
[2017-11-09 10:17:55,484] [main] [INFO] main Delete segment lock has been successfully acquired [StandardLogService.java]
[2017-11-09 10:17:55,485] [main] [AUDIT] [slave1][root][Thread-1]Delete segment by date is failed. No matching segment found. [StandardLogService.java]
[2017-11-09 10:17:55,486] [main] [INFO] main Table status lock has been successfully released [StandardLogService.java]
[2017-11-09 10:17:55,486] [main] [INFO] main Successfully deleted the lock file /home/xu/local_carbonstore/default/testcarbonoverwrite/delete_segment.lock [StandardLogService.java]
[2017-11-09 10:17:55,486] [main] [INFO] main Delete segments lock has been successfully released [StandardLogService.java]
Exception in thread "main" java.lang.RuntimeException: Delete segment by date is failed. No matching segment found.
at scala.sys.package$.error(package.scala:27)
at org.apache.carbondata.api.CarbonStore$.deleteLoadByDate(CarbonStore.scala:152)
at org.apache.spark.sql.execution.command.management.DeleteLoadByLoadDateCommand.processData(DeleteLoadByLoadDateCommand.scala:42)
at org.apache.spark.sql.execution.command.management.DeleteLoadByLoadDateCommand.run(DeleteLoadByLoadDateCommand.scala:34)
at org.apache.spark.sql.execution.command.ExecutedCommandExec.sideEffectResult$lzycompute(commands.scala:58)
at org.apache.spark.sql.execution.command.ExecutedCommandExec.sideEffectResult(commands.scala:56)
at org.apache.spark.sql.execution.command.ExecutedCommandExec.doExecute(commands.scala:74)
at org.apache.spark.sql.execution.SparkPlan$$anonfun$execute$1.apply(SparkPlan.scala:114)
at org.apache.spark.sql.execution.SparkPlan$$anonfun$execute$1.apply(SparkPlan.scala:114)
at org.apache.spark.sql.execution.SparkPlan$$anonfun$executeQuery$1.apply(SparkPlan.scala:135)
at org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:151)
at org.apache.spark.sql.execution.SparkPlan.executeQuery(SparkPlan.scala:132)
at org.apache.spark.sql.execution.SparkPlan.execute(SparkPlan.scala:113)
at org.apache.spark.sql.execution.QueryExecution.toRdd$lzycompute(QueryExecution.scala:87)
at org.apache.spark.sql.execution.QueryExecution.toRdd(QueryExecution.scala:87)
at org.apache.spark.sql.Dataset.<init>(Dataset.scala:185)
at org.apache.spark.sql.Dataset$.ofRows(Dataset.scala:64)
at org.apache.spark.sql.SparkSession.sql(SparkSession.scala:592)
at chuanyin.xu.updb.process.util.SparkEngine.sql(SparkEngine.java:69)
at chuanyin.xu.updb.process.useless.CarbonOverwriteTest.writeDf2Table(CarbonOverwriteTest.java:90)
at chuanyin.xu.updb.process.useless.CarbonOverwriteTest.main(CarbonOverwriteTest.java:102)