add jar hdfs:/hadoop-aws-2.7.4.4.jar;
I don't get any errors, so the JAR seems to be found correctly (if I change the name, I get a file-not-found error). However, creating the external table still fails with the same error.
What am I doing wrong? I thought it might be related to incompatible versions, but everything seems to match the versions in my Docker Compose file. This is the Docker Compose file being used:
version: "3"
services:
namenode:
image: bde2020/hadoop-namenode:2.0.0-hadoop2.7.4-java8
volumes:
- namenode:/hadoop/dfs/name
environment:
- CLUSTER_NAME=test
env_file:
- ./hadoop-hive.env
ports:
- "50070:50070"
datanode:
image: bde2020/hadoop-datanode:2.0.0-hadoop2.7.4-java8
volumes:
- datanode:/hadoop/dfs/data
env_file:
- ./hadoop-hive.env
environment:
SERVICE_PRECONDITION: "namenode:50070"
ports:
- "50075:50075"
resourcemanager:
image: bde2020/hadoop-resourcemanager:2.0.0-hadoop2.7.4-java8
environment:
SERVICE_PRECONDITION: "namenode:50070 datanode:50075"
env_file:
- ./hadoop-hive.env
hive-server:
image: bde2020/hive:2.3.2-postgresql-metastore
env_file:
- ./hadoop-hive.env
environment:
HIVE_CORE_CONF_javax_jdo_option_ConnectionURL: "jdbc:postgresql://hive-metastore/metastore"
SERVICE_PRECONDITION: "hive-metastore:9083"
ports:
- "10000:10000"
hive-metastore:
image: bde2020/hive:2.3.2-postgresql-metastore
env_file:
- ./hadoop-hive.env
command: /opt/hive/bin/hive --service metastore
environment:
SERVICE_PRECONDITION: "namenode:50070 datanode:50075 hive-metastore-postgresql:5432 resourcemanager:8088"
ports:
- "9083:9083"
hive-metastore-postgresql:
image: bde2020/hive-metastore-postgresql:2.3.0
ports:
- "5432:5432"
huedb:
image: postgres:12.1-alpine
volumes:
- pg_data:/var/lib/postgresl/data/
ports:
- "5432"
env_file:
- ./hadoop-hive.env
environment:
SERVICE_PRECONDITION: "namenode:50070 datanode:50075 hive-metastore-postgresql:5432 resourcemanager:8088 hive-metastore:9083"
hue:
image: gethue/hue:4.6.0
environment:
SERVICE_PRECONDITION: "namenode:50070 datanode:50075 hive-metastore-postgresql:5432 resourcemanager:8088 hive-metastore:9083 huedb:5000"
ports:
- "8888:8888"
volumes:
- ./hue-overrides.ini:/usr/share/hue/desktop/conf/hue-overrides.ini
links:
- huedb
volumes:
namenode:
datanode:
pg_data:
Would appreciate any help! Thanks.
With the official apache/hive image, the AWS dependencies are available in /opt/hadoop/share/hadoop/tools/lib/. Hive does not load them by default because, apparently, jars in that folder are excluded from the classpath. Instead, you can create soft links to them from /opt/hadoop/share/hadoop/common/lib/. Here is a working Dockerfile with a custom base image; make sure the version you choose is aligned with your other versions.
ARG HIVE_VERSION
FROM apache/hive:${HIVE_VERSION}
USER root
RUN <<EOF
# link the bundled S3A connector and AWS SDK jars into a directory that is on Hive's classpath
ln -s /opt/hadoop/share/hadoop/tools/lib/hadoop-aws* /opt/hadoop/share/hadoop/common/lib/
ln -s /opt/hadoop/share/hadoop/tools/lib/aws-java-sdk* /opt/hadoop/share/hadoop/common/lib/
EOF
USER hive
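To build it, pass the Hive version you are targeting as a build argument; the image name and version below are only example values:

docker build --build-arg HIVE_VERSION=4.0.0 -t hive-with-s3a:4.0.0 .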
(See the dependency analysis.)
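With those jars on the classpath and the S3A credentials configured (for example via fs.s3a.access.key and fs.s3a.secret.key, or an instance profile), an external table over S3 can then be declared without any ADD JAR workaround. A minimal sketch, with a hypothetical table, schema, and bucket name:

-- hypothetical schema and bucket; the s3a:// LOCATION is the relevant part
CREATE EXTERNAL TABLE my_events (
  id   INT,
  name STRING
)
STORED AS PARQUET
LOCATION 's3a://my-bucket/events/';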