[test](hive)revert hive container to host mode (#34322)

Revert hive container to host mode to fix pipeline problem
This commit is contained in:
苏小刚
2024-05-06 10:58:13 +08:00
committed by yiguolei
parent 63cd632abe
commit 4ecc3edc21
7 changed files with 55 additions and 114 deletions

View File

@ -1,52 +0,0 @@
#
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
HIVE_SITE_CONF_javax_jdo_option_ConnectionURL=jdbc:postgresql://hive-metastore-postgresql/metastore
HIVE_SITE_CONF_javax_jdo_option_ConnectionDriverName=org.postgresql.Driver
HIVE_SITE_CONF_javax_jdo_option_ConnectionUserName=hive
HIVE_SITE_CONF_javax_jdo_option_ConnectionPassword=hive
HIVE_SITE_CONF_datanucleus_autoCreateSchema=false
HIVE_SITE_CONF_hive_metastore_uris=thrift://hive-metastore:9083
HIVE_SITE_CONF_hive_server2_thrift_bind_host=0.0.0.0
HIVE_SITE_CONF_hive_server2_thrift_port=10000
HIVE_SITE_CONF_hive_compactor_initiator_on=true
HIVE_SITE_CONF_hive_compactor_worker_threads=2
HIVE_SITE_CONF_metastore_storage_schema_reader_impl=org.apache.hadoop.hive.metastore.SerDeStorageSchemaReader
CORE_CONF_fs_defaultFS=hdfs://namenode:8020
CORE_CONF_hadoop_http_staticuser_user=root
CORE_CONF_hadoop_proxyuser_hue_hosts=*
CORE_CONF_hadoop_proxyuser_hue_groups=*
HDFS_CONF_dfs_webhdfs_enabled=true
HDFS_CONF_dfs_permissions_enabled=false
HDFS_CONF_dfs_namenode_datanode_registration_ip___hostname___check=false
YARN_CONF_yarn_log___aggregation___enable=true
YARN_CONF_yarn_resourcemanager_recovery_enabled=true
YARN_CONF_yarn_resourcemanager_store_class=org.apache.hadoop.yarn.server.resourcemanager.recovery.FileSystemRMStateStore
YARN_CONF_yarn_resourcemanager_fs_state___store_uri=/rmstate
YARN_CONF_yarn_nodemanager_remote___app___log___dir=/app-logs
YARN_CONF_yarn_log_server_url=http://historyserver:8188/applicationhistory/logs/
YARN_CONF_yarn_timeline___service_enabled=true
YARN_CONF_yarn_timeline___service_generic___application___history_enabled=true
YARN_CONF_yarn_resourcemanager_system___metrics___publisher_enabled=true
YARN_CONF_yarn_resourcemanager_hostname=resourcemanager
YARN_CONF_yarn_timeline___service_hostname=historyserver
YARN_CONF_yarn_resourcemanager_address=resourcemanager:8032
YARN_CONF_yarn_resourcemanager_scheduler_address=resourcemanager:8030
YARN_CONF_yarn_resourcemanager_resource__tracker_address=resourcemanager:8031

View File

@ -15,14 +15,16 @@
# limitations under the License.
#
HIVE_SITE_CONF_javax_jdo_option_ConnectionURL=jdbc:postgresql://hive-metastore-postgresql/metastore
HIVE_SITE_CONF_javax_jdo_option_ConnectionURL=jdbc:postgresql://${IP_HOST}:${PG_PORT}/metastore
HIVE_SITE_CONF_javax_jdo_option_ConnectionDriverName=org.postgresql.Driver
HIVE_SITE_CONF_javax_jdo_option_ConnectionUserName=hive
HIVE_SITE_CONF_javax_jdo_option_ConnectionPassword=hive
HIVE_SITE_CONF_datanucleus_autoCreateSchema=false
HIVE_SITE_CONF_hive_metastore_uris=thrift://hive-metastore:9083
HIVE_SITE_CONF_hive_metastore_port=${HMS_PORT}
HIVE_SITE_CONF_hive_metastore_uris=thrift://${IP_HOST}:${HMS_PORT}
HIVE_SITE_CONF_hive_server2_thrift_bind_host=0.0.0.0
HIVE_SITE_CONF_hive_server2_thrift_port=10000
HIVE_SITE_CONF_hive_server2_thrift_port=${HS_PORT}
HIVE_SITE_CONF_hive_server2_webui_port=0
HIVE_SITE_CONF_hive_compactor_initiator_on=true
HIVE_SITE_CONF_hive_compactor_worker_threads=2
HIVE_SITE_CONF_metastore_storage_schema_reader_impl=org.apache.hadoop.hive.metastore.SerDeStorageSchemaReader

View File

@ -26,62 +26,68 @@ services:
env_file:
- ./hadoop-hive.env
container_name: ${CONTAINER_UID}hadoop2-namenode
ports:
- "${FS_PORT}:8020"
expose:
- "50070"
- "${FS_PORT}"
healthcheck:
test: [ "CMD", "curl", "http://localhost:50070/" ]
interval: 5s
timeout: 120s
retries: 120
network_mode: "host"
datanode:
image: bde2020/hadoop-datanode:2.0.0-hadoop2.7.4-java8
env_file:
- ./hadoop-hive.env
environment:
SERVICE_PRECONDITION: "namenode:50070"
SERVICE_PRECONDITION: "${IP_HOST}:50070"
container_name: ${CONTAINER_UID}hadoop2-datanode
expose:
- "50075"
healthcheck:
test: [ "CMD", "curl", "http://localhost:50075" ]
interval: 5s
timeout: 60s
retries: 120
network_mode: "host"
hive-server:
image: bde2020/hive:2.3.2-postgresql-metastore
env_file:
- ./hadoop-hive-metastore.env
- ./hadoop-hive.env
environment:
HIVE_CORE_CONF_javax_jdo_option_ConnectionURL: "jdbc:postgresql://hive-metastore/metastore"
SERVICE_PRECONDITION: "hive-metastore:9083"
HIVE_CORE_CONF_javax_jdo_option_ConnectionURL: "jdbc:postgresql://${IP_HOST}:${PG_PORT}/metastore"
SERVICE_PRECONDITION: "${IP_HOST}:${HMS_PORT}"
container_name: ${CONTAINER_UID}hive2-server
ports:
- "${HS_PORT}:10000"
expose:
- "${HS_PORT}"
depends_on:
- datanode
- namenode
healthcheck:
test: beeline -u "jdbc:hive2://127.0.0.1:10000/default" -n health_check -e "show databases;"
test: beeline -u "jdbc:hive2://127.0.0.1:${HS_PORT}/default" -n health_check -e "show databases;"
interval: 10s
timeout: 120s
retries: 120
network_mode: "host"
hive-metastore:
image: bde2020/hive:2.3.2-postgresql-metastore
env_file:
- ./hadoop-hive-metastore.env
- ./hadoop-hive.env
command: /bin/bash /mnt/scripts/hive-metastore.sh
# command: /opt/hive/bin/hive --service metastore
environment:
SERVICE_PRECONDITION: "namenode:50070 datanode:50075 hive-metastore-postgresql:5432"
SERVICE_PRECONDITION: "${IP_HOST}:50070 ${IP_HOST}:50075 ${IP_HOST}:${PG_PORT}"
container_name: ${CONTAINER_UID}hive2-metastore
ports:
- "${HMS_PORT}:9083"
expose:
- "${HMS_PORT}"
volumes:
- ./scripts:/mnt/scripts
depends_on:
- hive-metastore-postgresql
network_mode: "host"
hive-metastore-postgresql:
image: bde2020/hive-metastore-postgresql:2.3.0
@ -93,13 +99,3 @@ services:
interval: 5s
timeout: 60s
retries: 120
# solve HiveServer2 connect error:
# java.net.URISyntaxException Illegal character in hostname :thrift://${CONTAINER_UID}hive2_default:9083
networks:
default:
name: ${CONTAINER_UID}hive2-default
ipam:
driver: default
config:
- subnet: 168.58.0.0/24

View File

@ -20,7 +20,7 @@
# Do not use "_" or other special characters, only numbers and alphabet.
# NOTICE: change this uid will modify hive-*.yaml
export FS_PORT=8020 #should be same as hive2HdfsPort in regression-conf.groovy
export HMS_PORT=9083 #should be same as hive2HmsPort in regression-conf.groovy
export HS_PORT=10000 #should be same as hive2ServerPort in regression-conf.groovy
export PG_PORT=5432 #should be same as hive2PgPort in regression-conf.groovy
export FS_PORT=8020 # should be same as hive2HdfsPort in regression-conf.groovy
export HMS_PORT=9083 # should be same as hive2HmsPort in regression-conf.groovy
export HS_PORT=10000 # should be same as hive2ServerPort in regression-conf.groovy
export PG_PORT=5432 # should be same as hive2PgPort in regression-conf.groovy

View File

@ -26,62 +26,68 @@ services:
env_file:
- ./hadoop-hive.env
container_name: ${CONTAINER_UID}hadoop3-namenode
ports:
- "${FS_PORT}:8020"
expose:
- "9870"
- "${FS_PORT}"
healthcheck:
test: [ "CMD", "curl", "http://localhost:9870/" ]
interval: 5s
timeout: 120s
retries: 120
network_mode: "host"
datanode:
image: bde2020/hadoop-datanode:2.0.0-hadoop3.2.1-java8
env_file:
- ./hadoop-hive.env
environment:
SERVICE_PRECONDITION: "namenode:9870"
SERVICE_PRECONDITION: "${IP_HOST}:9870"
container_name: ${CONTAINER_UID}hadoop3-datanode
expose:
- "9864"
healthcheck:
test: [ "CMD", "curl", "http://localhost:9864" ]
interval: 5s
timeout: 60s
retries: 120
network_mode: "host"
hive-server:
image: lishizhen/hive:3.1.2-postgresql-metastore
env_file:
- ./hadoop-hive-metastore.env
- ./hadoop-hive.env
environment:
HIVE_CORE_CONF_javax_jdo_option_ConnectionURL: "jdbc:postgresql://hive-metastore/metastore"
SERVICE_PRECONDITION: "hive-metastore:9083"
HIVE_CORE_CONF_javax_jdo_option_ConnectionURL: "jdbc:postgresql://${IP_HOST}:${PG_PORT}/metastore"
SERVICE_PRECONDITION: "${IP_HOST}:${HMS_PORT}"
container_name: ${CONTAINER_UID}hive3-server
ports:
- "${HS_PORT}:10000"
expose:
- "${HS_PORT}"
depends_on:
- datanode
- namenode
healthcheck:
test: beeline -u "jdbc:hive2://127.0.0.1:10000/default" -n health_check -e "show databases;"
test: beeline -u "jdbc:hive2://127.0.0.1:${HS_PORT}/default" -n health_check -e "show databases;"
interval: 10s
timeout: 120s
retries: 120
network_mode: "host"
hive-metastore:
image: lishizhen/hive:3.1.2-postgresql-metastore
env_file:
- ./hadoop-hive-metastore.env
- ./hadoop-hive.env
command: /bin/bash /mnt/scripts/hive-metastore.sh
# command: /opt/hive/bin/hive --service metastore
environment:
SERVICE_PRECONDITION: "namenode:9870 datanode:9864 hive-metastore-postgresql:5432"
SERVICE_PRECONDITION: "${IP_HOST}:9870 ${IP_HOST}:9864 ${IP_HOST}:${PG_PORT}"
container_name: ${CONTAINER_UID}hive3-metastore
ports:
- "${HMS_PORT}:9083"
expose:
- "${HMS_PORT}"
volumes:
- ./scripts:/mnt/scripts
depends_on:
- hive-metastore-postgresql
network_mode: "host"
hive-metastore-postgresql:
image: bde2020/hive-metastore-postgresql:3.1.0
@ -93,14 +99,3 @@ services:
interval: 5s
timeout: 60s
retries: 120
# solve HiveServer2 connect error:
# java.net.URISyntaxException Illegal character in hostname :thrift://${CONTAINER_UID}hive3_default:9083
networks:
default:
name: ${CONTAINER_UID}hive3-default
ipam:
driver: default
config:
- subnet: 168.59.0.0/24

View File

@ -20,7 +20,7 @@
# Do not use "_" or other special characters, only numbers and alphabet.
# NOTICE: change this uid will modify hive-*.yaml
export FS_PORT=8320 #should be same as hive3HdfsPort in regression-conf.groovy
export HMS_PORT=9383 #should be same as hive3HmsPort in regression-conf.groovy
export HS_PORT=13000 #should be same as hive3ServerPort in regression-conf.groovy
export PG_PORT=5732 #should be same as hive3PgPort in regression-conf.groovy
export FS_PORT=8320 # should be same as hive3HdfsPort in regression-conf.groovy
export HMS_PORT=9383 # should be same as hive3HmsPort in regression-conf.groovy
export HS_PORT=13000 # should be same as hive3ServerPort in regression-conf.groovy
export PG_PORT=5732 # should be same as hive3PgPort in regression-conf.groovy

View File

@ -319,7 +319,7 @@ if [[ "${RUN_HIVE2}" -eq 1 ]]; then
export CONTAINER_UID=${CONTAINER_UID}
. "${ROOT}"/docker-compose/hive/hive-2x_settings.env
envsubst < "${ROOT}"/docker-compose/hive/hive-2x.yaml.tpl > "${ROOT}"/docker-compose/hive/hive-2x.yaml
envsubst < "${ROOT}"/docker-compose/hive/hadoop-hive-metastore.env.tpl > "${ROOT}"/docker-compose/hive/hadoop-hive-metastore.env
envsubst < "${ROOT}"/docker-compose/hive/hadoop-hive.env.tpl > "${ROOT}"/docker-compose/hive/hadoop-hive.env
sudo docker compose -p ${CONTAINER_UID}hive2 -f "${ROOT}"/docker-compose/hive/hive-2x.yaml --env-file "${ROOT}"/docker-compose/hive/hadoop-hive.env down
if [[ "${STOP}" -ne 1 ]]; then
sudo docker compose -p ${CONTAINER_UID}hive2 -f "${ROOT}"/docker-compose/hive/hive-2x.yaml --env-file "${ROOT}"/docker-compose/hive/hadoop-hive.env up --build --remove-orphans -d
@ -345,7 +345,7 @@ if [[ "${RUN_HIVE3}" -eq 1 ]]; then
export CONTAINER_UID=${CONTAINER_UID}
. "${ROOT}"/docker-compose/hive/hive-3x_settings.env
envsubst < "${ROOT}"/docker-compose/hive/hive-3x.yaml.tpl > "${ROOT}"/docker-compose/hive/hive-3x.yaml
envsubst < "${ROOT}"/docker-compose/hive/hadoop-hive-metastore.env.tpl > "${ROOT}"/docker-compose/hive/hadoop-hive-metastore.env
envsubst < "${ROOT}"/docker-compose/hive/hadoop-hive.env.tpl > "${ROOT}"/docker-compose/hive/hadoop-hive.env
sudo docker compose -p ${CONTAINER_UID}hive3 -f "${ROOT}"/docker-compose/hive/hive-3x.yaml --env-file "${ROOT}"/docker-compose/hive/hadoop-hive.env down
if [[ "${STOP}" -ne 1 ]]; then
sudo docker compose -p ${CONTAINER_UID}hive3 -f "${ROOT}"/docker-compose/hive/hive-3x.yaml --env-file "${ROOT}"/docker-compose/hive/hadoop-hive.env up --build --remove-orphans -d