tencentyun / cos-java-sdk-v5

java sdk for qcloud cos v5 (xml api)
MIT License
157 stars 142 forks source link

spark操作hive表任务结束时报错 #82

Closed XuQianJin-Stars closed 9 months ago

XuQianJin-Stars commented 3 years ago

image 在spark任务结束时会报如下错误: 10:40:22[dispatcher-event-loop-0] INFO org.apache.spark.scheduler.OutputCommitCoordinator$OutputCommitCoordinatorEndpoint.logInfo:57 - OutputCommitCoordinator stopped! 10:40:22[Thread-1] INFO org.apache.spark.SparkContext.logInfo:57 - Successfully stopped SparkContext 10:40:22[Thread-1] INFO org.apache.spark.util.ShutdownHookManager.logInfo:57 - Shutdown hook called 10:40:22[Thread-1] INFO org.apache.spark.util.ShutdownHookManager.logInfo:57 - Deleting directory /private/var/folders/9d/qtc20f6x197431jvthgs58zr0000gn/T/spark-f52383f9-6554-42f5-8296-ac65779a77e7 10:40:22[Thread-1] INFO org.apache.hadoop.fs.BufferPool.close:265 - Close a buffer pool instance. 10:40:22[Thread-1] INFO org.apache.hadoop.fs.BufferPool.close:278 - Begin to release the buffers. 10:40:22[Thread-1] ERROR com.qcloud.cos.http.DefaultCosHttpClient.exeute:498 - httpClient execute occur a unknow exception, httpRequest: endpoint: dlf-test-1258469122.cos.ap-beijing.myqcloud.com, resourcepath: /tmp/hive/MjUxMDExMzQ5OkRMRjp6WUJFbUpGZzp1N0N6dWpG/b57883f4-d810-49ed-a920-36e1c7cf92e1, httpMethod: HEAD, headers { Authorization : q-sign-algorithm=sha1&q-ak=AKIDDCvEI6MxqUt2ydvE7jQpwta1B2gS9DF3&q-sign-time=1622428822;1622432422&q-key-time=1622428822;1622432422&q-header-list=host&q-url-param-list=&q-signature=a94b5c55ca67f69330f51ab5efb5321b3992a89e, User-Agent : cos-hadoop-plugin-v5.9.4, Host : dlf-test-1258469122.cos.ap-beijing.myqcloud.com, }, params: { } java.lang.IllegalStateException: Connection pool shut down at com.qcloud.cos.thirdparty.org.apache.http.util.Asserts.check(Asserts.java:34) ~[cos_api-bundle-5.6.42.jar:?] at com.qcloud.cos.thirdparty.org.apache.http.pool.AbstractConnPool.lease(AbstractConnPool.java:189) ~[cos_api-bundle-5.6.42.jar:?] at com.qcloud.cos.thirdparty.org.apache.http.impl.conn.PoolingHttpClientConnectionManager.requestConnection(PoolingHttpClientConnectionManager.java:257) ~[cos_api-bundle-5.6.42.jar:?] 
at com.qcloud.cos.thirdparty.org.apache.http.impl.execchain.MainClientExec.execute(MainClientExec.java:176) ~[cos_api-bundle-5.6.42.jar:?] at com.qcloud.cos.thirdparty.org.apache.http.impl.execchain.ProtocolExec.execute(ProtocolExec.java:185) ~[cos_api-bundle-5.6.42.jar:?] at com.qcloud.cos.thirdparty.org.apache.http.impl.execchain.RetryExec.execute(RetryExec.java:89) ~[cos_api-bundle-5.6.42.jar:?] at com.qcloud.cos.thirdparty.org.apache.http.impl.execchain.RedirectExec.execute(RedirectExec.java:111) ~[cos_api-bundle-5.6.42.jar:?] at com.qcloud.cos.thirdparty.org.apache.http.impl.client.InternalHttpClient.doExecute(InternalHttpClient.java:185) ~[cos_api-bundle-5.6.42.jar:?] at com.qcloud.cos.thirdparty.org.apache.http.impl.client.CloseableHttpClient.execute(CloseableHttpClient.java:83) ~[cos_api-bundle-5.6.42.jar:?] at com.qcloud.cos.thirdparty.org.apache.http.impl.client.CloseableHttpClient.execute(CloseableHttpClient.java:56) ~[cos_api-bundle-5.6.42.jar:?] at com.qcloud.cos.http.DefaultCosHttpClient.executeOneRequest(DefaultCosHttpClient.java:409) ~[cos_api-bundle-5.6.42.jar:?] at com.qcloud.cos.http.DefaultCosHttpClient.exeute(DefaultCosHttpClient.java:471) [cos_api-bundle-5.6.42.jar:?] at com.qcloud.cos.COSClient.invoke(COSClient.java:653) [cos_api-bundle-5.6.42.jar:?] at com.qcloud.cos.COSClient.getObjectMetadata(COSClient.java:1271) [cos_api-bundle-5.6.42.jar:?] at org.apache.hadoop.fs.CosNativeFileSystemStore.callCOSClientWithRetry(CosNativeFileSystemStore.java:1205) [hadoop-cos-2.7.5-5.9.4.jar:?] at org.apache.hadoop.fs.CosNativeFileSystemStore.queryObjectMetadata(CosNativeFileSystemStore.java:471) [hadoop-cos-2.7.5-5.9.4.jar:?] at org.apache.hadoop.fs.CosNativeFileSystemStore.retrieveMetadata(CosNativeFileSystemStore.java:544) [hadoop-cos-2.7.5-5.9.4.jar:?] at sun.reflect.GeneratedMethodAccessor13.invoke(Unknown Source) ~[?:?] 
at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[?:1.8.0_251] at java.lang.reflect.Method.invoke(Method.java:498) ~[?:1.8.0_251] at org.apache.hadoop.io.retry.RetryInvocationHandler.invokeMethod(RetryInvocationHandler.java:191) [hadoop-common-2.7.4.jar:?] at org.apache.hadoop.io.retry.RetryInvocationHandler.invoke(RetryInvocationHandler.java:102) [hadoop-common-2.7.4.jar:?] at com.sun.proxy.$Proxy46.retrieveMetadata(Unknown Source) [?:?] at org.apache.hadoop.fs.CosFileSystem.getFileStatus(CosFileSystem.java:428) [hadoop-cos-2.7.5-5.9.4.jar:?] at org.apache.hadoop.fs.FileSystem.exists(FileSystem.java:1426) [hadoop-common-2.7.4.jar:?] at org.apache.hadoop.fs.FileSystem.processDeleteOnExit(FileSystem.java:1409) [hadoop-common-2.7.4.jar:?] at org.apache.hadoop.fs.FileSystem.close(FileSystem.java:2070) [hadoop-common-2.7.4.jar:?] at org.apache.hadoop.fs.CosFileSystem.close(CosFileSystem.java:1160) [hadoop-cos-2.7.5-5.9.4.jar:?] at org.apache.hadoop.fs.FileSystem$Cache.closeAll(FileSystem.java:2760) [hadoop-common-2.7.4.jar:?] at org.apache.hadoop.fs.FileSystem$Cache$ClientFinalizer.run(FileSystem.java:2777) [hadoop-common-2.7.4.jar:?] at org.apache.hadoop.util.ShutdownHookManager$1.run(ShutdownHookManager.java:54) [hadoop-common-2.7.4.jar:?]

XuQianJin-Stars commented 3 years ago

复现代码如下:

import org.apache.spark.SparkConf;
import org.apache.spark.sql.Dataset;
import org.apache.spark.sql.Row;
import org.apache.spark.sql.SaveMode;
import org.apache.spark.sql.SparkSession;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import static com.tencent.dlf.job.Constants.HIVE_METASTORE_URIS;

/**
 * Minimal reproduction for issue #82: after {@code sparkSession.stop()},
 * Hadoop's JVM shutdown hook closes CosFileSystem, which calls back into the
 * COS client after its HTTP connection pool has already been shut down,
 * producing "java.lang.IllegalStateException: Connection pool shut down"
 * (see the stack trace in the issue body).
 */
public class AppMain {
  // NOTE(review): LOG is declared but never used in this snippet.
  private static final Logger LOG = LoggerFactory.getLogger(AppMain.class);

  private static String dataSourcePath;   // path of the input data source (a cosn:// URI)
  private static String tableName;        // temp-view / table name registered in Spark
  private static String sql;              // SQL statement to execute against the temp view
  private static String destPath;         // intended output path — NOTE(review): never used below

  /**
   * Initializes the run parameters.
   *
   * NOTE(review): {@code args} is ignored — all values are hard-coded for the
   * reproduction. The declared {@code IllegalArgumentException} is never
   * actually thrown here.
   *
   * @param args command-line arguments (currently unused)
   * @throws IllegalArgumentException declared but not thrown in this version
   */
  public static void initParams(String[] args) throws IllegalArgumentException {
    AppMain.dataSourcePath = "cosn://dlf-test-1258469122/sstest/1.csv";
    AppMain.tableName = "aaa";
    AppMain.sql = "select _c0 as a, _c1 as b, _c2 as c, 1 as year from aaa";
    AppMain.destPath = "cosn://dlf-test-1258469122/sstest/json";
  }

  public static void main(String[] args) {
    // Initialize parameters (hard-coded; args are ignored — see initParams).
    AppMain.initParams(args);

    SparkConf sparkConf = new SparkConf().setMaster("local[2]");

    // Build a Hive-enabled local SparkSession pointing at a local metastore.
    // HIVE_METASTORE_URIS is a project constant (com.tencent.dlf.job.Constants).
    SparkSession sparkSession = SparkSession.builder()
        .config(sparkConf)
        .config(HIVE_METASTORE_URIS, "thrift://localhost:9083")
        .config("spark.sql.warehouse.dir", "/tmp/spark-warehouse")
        .enableHiveSupport()
        .getOrCreate();

    // Load the source data as CSV and register it as a temp view.
    // (The original comment said "json", but read().csv() is what is called.)
    Dataset<Row> df = sparkSession.read().csv(AppMain.dataSourcePath);
    df.createOrReplaceTempView(AppMain.tableName);

    Dataset<Row> resultDF = sparkSession.sql(AppMain.sql);
    resultDF.show();

    // Stopping the session here leaves the cached CosFileSystem to be closed
    // by Hadoop's shutdown hook, after the COS client's connection pool is
    // already shut down — which is what triggers the reported exception.
    sparkSession.stop();
  }
}
wenbobuaa commented 2 years ago

这个 java.lang.IllegalStateException: Connection pool shut down 错误的原因是：在调用 COS 接口时，COS client 已经被 shutdown 了。不确定你这里使用的是哪个 COS 依赖，建议去腾讯云提个工单，以便详细排查。