Closed zhenik closed 4 years ago
This fix optimizes test performance.
==> default: Running provisioner: ansible_local... default: Running ansible-playbook... PLAY [all] ********************************************************************* TASK [Gathering Facts] ********************************************************* [DEPRECATION WARNING]: Distribution Ubuntu 18.04 on host default should use /usr/bin/python3, but is using /usr/bin/python for backward compatibility with prior Ansible releases. A future Ansible release will default to using the discovered platform python for this host. See https://docs.ansible.com/ansible/ 2.9/reference_appendices/interpreter_discovery.html for more information. This feature will be removed in version 2.12. Deprecation warnings can be disabled by setting deprecation_warnings=False in ansible.cfg. ok: [default] TASK [debug] ******************************************************************* ok: [default] => { "msg": "test mode" } TASK [wait for nomad to come up] *********************************************** ok: [default] TASK [start minio nomad job] *************************************************** changed: [default] TASK [start hive nomad job] **************************************************** changed: [default] TASK [Test healthchecks of services] ******************************************* included: /vagrant/test/test.yml for default TASK [Minio healthchecks pass] ************************************************* FAILED - RETRYING: Minio healthchecks pass (15 retries left). FAILED - RETRYING: Minio healthchecks pass (14 retries left). FAILED - RETRYING: Minio healthchecks pass (13 retries left). FAILED - RETRYING: Minio healthchecks pass (12 retries left). 
ok: [default] TASK [Response body result_minio] ********************************************** ok: [default] => { "msg": "Here [{'Node': 'vagrant', 'CheckID': '_nomad-check-471a72c355bf6548532de8f38d283b0b5f71b4ac', 'Name': 'minio-ready', 'Definition': {}, 'Notes': '', 'ModifyIndex': 91, 'Status': 'passing', 'ServiceName': 'minio', 'ServiceID': '_nomad-task-52849690-ea91-628f-afbe-a15441c4d5e0-group-s3-minio-9000', 'Output': 'HTTP GET http://10.0.3.10:22159/minio/health/ready: 200 OK Output: ', 'ServiceTags': [], 'CreateIndex': 59, 'Type': 'http'}, {'Node': 'vagrant', 'CheckID': '_nomad-check-4c799cf56c5ee4e6140489e6505c497850382db2', 'Name': 'minio-live', 'Definition': {}, 'Notes': '', 'ModifyIndex': 90, 'Status': 'passing', 'ServiceName': 'minio', 'ServiceID': '_nomad-task-52849690-ea91-628f-afbe-a15441c4d5e0-group-s3-minio-9000', 'Output': 'HTTP GET http://10.0.3.10:30760/minio/health/live: 200 OK Output: ', 'ServiceTags': [], 'CreateIndex': 57, 'Type': 'http'}]" } TASK [Hive-database healthchecks pass] ***************************************** FAILED - RETRYING: Hive-database healthchecks pass (20 retries left). ok: [default] TASK [Response body result_hive_database] ************************************** ok: [default] => { "msg": "Here [{'Node': 'vagrant', 'CheckID': '_nomad-check-330cd2774d7406fab34674f2e5cc9f9e354a4bfa', 'Name': 'service: \"hive-database\" check', 'Definition': {}, 'Notes': '', 'ModifyIndex': 102, 'Status': 'passing', 'ServiceName': 'hive-database', 'ServiceID': '_nomad-task-bc877336-574f-b2ef-a55c-872950830288-group-database-hive-database-5432', 'Output': '/var/run/postgresql:5432 - accepting connections\\n', 'ServiceTags': [], 'CreateIndex': 73, 'Type': 'ttl'}]" } TASK [Hive-metastore healthchecks pass] **************************************** FAILED - RETRYING: Hive-metastore healthchecks pass (30 retries left). FAILED - RETRYING: Hive-metastore healthchecks pass (29 retries left). 
FAILED - RETRYING: Hive-metastore healthchecks pass (28 retries left). FAILED - RETRYING: Hive-metastore healthchecks pass (27 retries left). FAILED - RETRYING: Hive-metastore healthchecks pass (26 retries left). FAILED - RETRYING: Hive-metastore healthchecks pass (25 retries left). ok: [default] TASK [Response body result_hive_metastore] ************************************* ok: [default] => { "msg": "Here [{'Node': 'vagrant', 'CheckID': '_nomad-check-69bc99d45cf6b2218ec63a79776c39b0c346b52a', 'Name': 'beeline', 'Definition': {}, 'Notes': '', 'ModifyIndex': 173, 'Status': 'passing', 'ServiceName': 'hive-metastore', 'ServiceID': '_nomad-task-5323d600-389e-3b34-3929-a875b9196eb1-group-metastore-hive-metastore-9083', 'Output': 'return code 0\\n', 'ServiceTags': [], 'CreateIndex': 44, 'Type': 'ttl'}]" } TASK [Hive-server healthchecks pass] ******************************************* FAILED - RETRYING: Hive-server healthchecks pass (20 retries left). FAILED - RETRYING: Hive-server healthchecks pass (19 retries left). FAILED - RETRYING: Hive-server healthchecks pass (18 retries left). 
ok: [default] TASK [Response body result_hive_server] **************************************** ok: [default] => { "msg": "Here [{'Node': 'vagrant', 'CheckID': '_nomad-check-5bb9b444d608fe2decc5496a0e951dc6437e95ef', 'Name': 'jmx', 'Definition': {}, 'Notes': '', 'ModifyIndex': 207, 'Status': 'passing', 'ServiceName': 'hive-server', 'ServiceID': '_nomad-task-8909b517-0e56-9597-2156-4dc33cfbf609-group-server-hive-server-10000', 'Output': 'HTTP GET http://10.0.3.10:27098/jmx: 200 OK Output: _path_latency50thPercentileLatency\" : 0,\\n \"S3guard_metadatastore_put_path_latency75thPercentileLatency\" : 0,\\n \"S3guard_metadatastore_put_path_latency90thPercentileLatency\" : 0,\\n \"S3guard_metadatastore_put_path_latency95thPercentileLatency\" : 0,\\n \"S3guard_metadatastore_put_path_latency99thPercentileLatency\" : 0,\\n \"S3guard_metadatastore_throttle_rateNumEvents\" : 0\\n }, {\\n \"name\" : \"org.apache.logging.log4j2:type=AsyncContext@4f4a7090,component=AsyncLoggerRingBuffer\",\\n \"modelerType\" : \"org.apache.logging.log4j.core.jmx.RingBufferAdmin\",\\n \"RemainingCapacity\" : 262144,\\n \"BufferSize\" : 262144\\n }, {\\n \"name\" : \"org.apache.logging.log4j2:type=AsyncContext@4f4a7090\",\\n \"modelerType\" : \"org.apache.logging.log4j.core.jmx.LoggerContextAdmin\",\\n \"ObjectName\" : \"org.apache.logging.log4j2:type=AsyncContext@4f4a7090\",\\n \"ConfigLocationUri\" : \"file:/opt/hive/conf/hive-log4j2.properties\",\\n \"Status\" : \"STARTED\",\\n \"ConfigText\" : \"# Licensed to the Apache Software Foundation (ASF) under one\\\\n# or more contributor license agreements. See the NOTICE file\\\\n# distributed with this work for additional information\\\\n# regarding copyright ownership. The ASF licenses this file\\\\n# to you under the Apache License, Version 2.0 (the\\\\n# \\\\\"License\\\\\"); you may not use this file except in compliance\\\\n# with the License. 
You may obtain a copy of the License at\\\\n#\\\\n# http://www.apache.org/licenses/LICENSE-2.0\\\\n#\\\\n# Unless required by applicable law or agreed to in writing, software\\\\n# distributed under the License is distributed on an \\\\\"AS IS\\\\\" BASIS,\\\\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\\\\n# See the License for the specific language governing permissions and\\\\n# limitations under the License.\\\\n\\\\nstatus = INFO\\\\nname = HiveLog4j2\\\\npackages = org.apache.hadoop.hive.ql.log\\\\n\\\\n# list of properties\\\\nproperty.hive.log.level = INFO\\\\nproperty.hive.root.logger = DRFA\\\\nproperty.hive.log.dir = ${sys:java.io.tmpdir}/${sys:user.name}\\\\nproperty.hive.log.file = hive.log\\\\n\\\\n# list of all appenders\\\\nappenders = console, DRFA\\\\n\\\\n# console appender\\\\nappender.console.type = Console\\\\nappender.console.name = console\\\\nappender.console.target = SYSTEM_ERR\\\\nappender.console.layout.type = PatternLayout\\\\nappender.console.layout.pattern = %d{yy/MM/dd HH:mm:ss} [%t]: %p %c{2}: %m%n\\\\n\\\\n# daily rolling file appender\\\\nappender.DRFA.type = RollingFile\\\\nappender.DRFA.name = DRFA\\\\nappender.DRFA.fileName = ${sys:hive.log.dir}/${sys:hive.log.file}\\\\n# Use %pid in the filePattern to append <process-id>@<host-name> to the filename if you want separate log files for different CLI session\\\\nappender.DRFA.filePattern = ${sys:hive.log.dir}/${sys:hive.log.file}.%d{yyyy-MM-dd}\\\\nappender.DRFA.layout.type = PatternLayout\\\\nappender.DRFA.layout.pattern = %d{ISO8601} %-5p [%t]: %c{2} (%F:%M(%L)) - %m%n\\\\nappender.DRFA.policies.type = Policies\\\\nappender.DRFA.policies.time.type = TimeBasedTriggeringPolicy\\\\nappender.DRFA.policies.time.interval = 1\\\\nappender.DRFA.policies.time.modulate = true\\\\nappender.DRFA.strategy.type = DefaultRolloverStrategy\\\\nappender.DRFA.strategy.max = 30\\\\n\\\\n# list of all loggers\\\\nloggers = NIOServerCnxn, ClientCnxnSocketNIO, 
DataNucleus, Datastore, JPOX\\\\n\\\\nlogger.NIOServerCnxn.name = org.apache.zookeeper.server.NIOServerCnxn\\\\nlogger.NIOServerCnxn.level = WARN\\\\n\\\\nlogger.ClientCnxnSocketNIO.name = org.apache.zookeeper.ClientCnxnSocketNIO\\\\nlogger.ClientCnxnSocketNIO.level = WARN\\\\n\\\\nlogger.DataNucleus.name = DataNucleus\\\\nlogger.DataNucleus.level = ERROR\\\\n\\\\nlogger.Datastore.name = Datastore\\\\nlogger.Datastore.level = ERROR\\\\n\\\\nlogger.JPOX.name = JPOX\\\\nlogger.JPOX.level = ERROR\\\\n\\\\n# root logger\\\\nrootLogger.level = ${sys:hive.log.level}\\\\nrootLogger.appenderRefs = root\\\\nrootLogger.appenderRef.root.ref = ${sys:hive.root.logger}\\\\n\",\\n \"ConfigName\" : \"HiveLog4j2\",\\n \"ConfigClassName\" : \"org.apache.logging.log4j.core.config.properties.PropertiesConfiguration\",\\n \"ConfigFilter\" : \"null\",\\n \"ConfigProperties\" : \"{hostName=021c1375a632, contextName=AsyncContext@4f4a7090}\",\\n \"Name\" : \"AsyncContext@4f4a7090\"\\n } ]\\n}', 'ServiceTags': [], 'CreateIndex': 33, 'Type': 'http'}, {'Node': 'vagrant', 'CheckID': '_nomad-check-9ee220d3a1c3fa92bbffd8f18d6cd67c881040eb', 'Name': 'beeline', 'Definition': {}, 'Notes': '', 'ModifyIndex': 196, 'Status': 'passing', 'ServiceName': 'hive-server', 'ServiceID': '_nomad-task-8909b517-0e56-9597-2156-4dc33cfbf609-group-server-hive-server-10000', 'Output': 'return code 0\\n', 'ServiceTags': [], 'CreateIndex': 35, 'Type': 'ttl'}]" } PLAY RECAP ********************************************************************* default : ok=14 changed=2 unreachable=0 failed=0 skipped=0 rescued=0 ignored=0
This fix optimizes test performance.
Logs