opensearch-project / opensearch-spark

Spark Accelerator framework; it enables secondary indices for remote data stores.
Apache License 2.0

[BUG] PPL describe couldn't handle fully qualified name #612

Closed: seankao-az closed this issue 1 week ago

seankao-az commented 1 month ago

What is the bug?

describe myglue_test.default.http_logs

{
  "status": "FAILED",
  "error": "{\"Message\":\"Fail to run query. Cause: Invalid table name: myglue_test.default.http_logs Syntax: [ database_name. ] table_name\"}"
}
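
The error message shows that the PPL table-name validation only accepts the two-part form [ database_name. ] table_name, so a catalog-qualified three-part name is rejected before the query ever reaches Spark. The snippet below is a minimal sketch of what a more permissive split could look like; the object and method names (QualifiedName, parse) are hypothetical, not the project's actual parser, and it assumes plain dot-separated identifiers with no quoting.

// Hypothetical sketch (Scala): accept an optional catalog part in a
// dot-separated table name. Names here are made up for illustration.
object QualifiedName {
  def parse(name: String): (Option[String], Option[String], String) =
    name.split('.') match {
      case Array(table)              => (None, None, table)
      case Array(db, table)          => (None, Some(db), table)
      case Array(catalog, db, table) => (Some(catalog), Some(db), table)
      case _ =>
        throw new IllegalArgumentException(
          s"Invalid table name: $name Syntax: [ catalog_name. ] [ database_name. ] table_name")
    }
}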

For comparison, the following works:

describe default.http_logs

{
    "status": "SUCCESS",
    "schema": [
        {
            "name": "col_name",
            "type": "string"
        },
        {
            "name": "data_type",
            "type": "string"
        },
        {
            "name": "comment",
            "type": "string"
        }
    ],
    "datarows": [
        [
            "@timestamp",
            "timestamp",
            null
        ],
        [
            "clientip",
            "string",
            null
        ],
        [
            "request",
            "string",
            null
        ],
        [
            "status",
            "int",
            null
        ],
        [
            "size",
            "int",
            null
        ],
        [
            "year",
            "int",
            null
        ],
        [
            "month",
            "int",
            null
        ],
        [
            "day",
            "int",
            null
        ],
        [
            "# Partition Information",
            "",
            ""
        ],
        [
            "# col_name",
            "data_type",
            "comment"
        ],
        [
            "year",
            "int",
            null
        ],
        [
            "month",
            "int",
            null
        ],
        [
            "day",
            "int",
            null
        ],
        [
            "",
            "",
            ""
        ],
        [
            "# Detailed Table Information",
            "",
            ""
        ],
        [
            "Catalog",
            "spark_catalog",
            ""
        ],
        [
            "Database",
            "default",
            ""
        ],
        [
            "Table",
            "http_logs",
            ""
        ],
        [
            "Owner",
            "hadoop",
            ""
        ],
        [
            "Created Time",
            "Fri Aug 23 21:47:33 GMT 2024",
            ""
        ],
        [
            "Last Access",
            "UNKNOWN",
            ""
        ],
        [
            "Created By",
            "Spark 3.5.1-amzn-0",
            ""
        ],
        [
            "Type",
            "EXTERNAL",
            ""
        ],
        [
            "Provider",
            "json",
            ""
        ],
        [
            "Table Properties",
            "[isRegisteredWithLakeFormation=false]",
            ""
        ],
        [
            "Location",
            "s3://...",
            ""
        ],
        [
            "Serde Library",
            "org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe",
            ""
        ],
        [
            "InputFormat",
            "org.apache.hadoop.mapred.SequenceFileInputFormat",
            ""
        ],
        [
            "OutputFormat",
            "org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat",
            ""
        ],
        [
            "Storage Properties",
            "[compression=bzip2]",
            ""
        ],
        [
            "Partition Provider",
            "Catalog",
            ""
        ]
    ],
    "total": 31,
    "size": 31
}
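
For reference, one way to check whether the session itself can resolve the three-part name is to run the equivalent DESCRIBE through Spark SQL directly, bypassing PPL. This is a hedged sketch only: it assumes a catalog named myglue_test is registered in the Spark session (for example through a Glue catalog plugin), which this report does not confirm.

// Hedged sketch: verify three-part name resolution via Spark SQL.
// If the "myglue_test" catalog is not configured, this fails for a
// different reason than the PPL parser does.
import org.apache.spark.sql.SparkSession

object DescribeQualifiedNameCheck {
  def main(args: Array[String]): Unit = {
    val spark = SparkSession.builder()
      .appName("describe-qualified-name-check")
      .getOrCreate()

    // Spark SQL accepts multi-part identifiers, so this exercises the
    // catalog.database.table path without going through PPL.
    spark.sql("DESCRIBE TABLE EXTENDED myglue_test.default.http_logs")
      .show(truncate = false)

    spark.stop()
  }
}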

What is your host/environment?