diff --git a/tests/hbase-setup.sh b/tests/hbase-setup.sh
index f86c0e9..0ac4f8e 100755
--- a/tests/hbase-setup.sh
+++ b/tests/hbase-setup.sh
@@ -4,9 +4,10 @@ set -ueo pipefail
HBASE_VERSION="0.96.1.1"
HBASE_FILE="hbase-${HBASE_VERSION}-hadoop2-bin.tar.gz"
+HBASE_DIR="hbase-${HBASE_VERSION}-hadoop2"
#HBASE_URL="https://downloads.apache.org/hbase/${HBASE_VERSION}/${HBASE_FILE}"
HBASE_URL="https://archive.apache.org/dist/hbase/hbase-${HBASE_VERSION}/${HBASE_FILE}"
-HBASE_FILE_CKSUM="5afb643c2391461619516624168e042b42a66e25217a3319552264c6af522e3a21a5212bfcba759b7b976794648ef13ee7b5a415f33cdb89bba43d40162aa685"
+HBASE_FILE_CKSUM="1625453f839f7d8c86078a131af9731f6df28c59e58870db84913dcbc640d430253134a825de7cec247ea1f0cf232435765e00844ee2e4faf31aeb356955c478"
HBASE_CONFIG_FILE="hbase/conf/hbase-site.xml"
HBASE_TEST_SUITE_EXECUTABLE="hbase/bin/hbase"
@@ -42,7 +43,8 @@ prepare_hbase() {
printf "HBase archive exists\n"
if compare_checksum $HBASE_FILE $HBASE_FILE_CKSUM; then
extract_archive $HBASE_FILE $HBASE_VERSION
- mv -f hbase-"${VERSION}" hbase/
+ mv -f "${HBASE_DIR}" hbase
+ return
else
printf "HBase archive has wrong checksum (%s)\n" "$1"
printf "Execute script again to redownload file\n"
@@ -55,7 +57,7 @@ prepare_hbase() {
if compare_checksum $HBASE_FILE $HBASE_FILE_CKSUM; then
extract_archive $HBASE_FILE $HBASE_VERSION
- mv -f hbase-${HBASE_VERSION} hbase/
+        mv -f "${HBASE_DIR}" hbase
fi
fi
}
diff --git a/tests/hdfs-setup.sh b/tests/hdfs-setup.sh
index b32298d..a961dc3 100755
--- a/tests/hdfs-setup.sh
+++ b/tests/hdfs-setup.sh
@@ -9,6 +9,7 @@ HADOOP_FILE_CKSUM="2460e02cd1f80dfed7a8981bbc934c095c0a341435118bec781fd835ec2eb
HDFS_CONFIG_FILE="hadoop/etc/hadoop/hdfs-site.xml"
HDFS_CONFIG_FILE_CORE="hadoop/etc/hadoop/core-site.xml"
HDFS_CONFIG_FILE_MAPRED="hadoop/etc/hadoop/mapred-site.xml"
+HDFS_CONFIG_DATANODES="hadoop/etc/hadoop/slaves"
HDFS_TEST_SUITE_EXECUTABLE="hadoop/bin/hdfs"
source setup.sh
@@ -23,7 +24,7 @@ create_hdfs_core_config() {
fs.defaultFS
- hdfs://$1
+ hdfs://$2
EOF
@@ -51,9 +52,33 @@ create_hdfs_config() {
+
+ dfs.nameservices
+ $2
+
+
+ dfs.ha.namenodes.$2
+ nn1,nn2
+
fs.defaultFS
- hdfs://$1
+ hdfs://$2
+
+
+ dfs.namenode.rpc-address.$2.nn1
+ localhost:8020
+
+
+ dfs.namenode.rpc-address.$2.nn2
+ master-1:8020
+
+
+ dfs.namenode.http-address.$2.nn1
+ localhost:50070
+
+
+ dfs.namenode.http-address.$2.nn2
+ master-1:50070
dfs.namenode.name.dir
@@ -62,7 +87,15 @@ create_hdfs_config() {
dfs.datanode.data.dir
/tmp/hdfs/datanode
-
+
+
+ dfs.namenode.shared.edits.dir
+ file:///tmp/hadoop
+
+
+ ha.zookeeper.quorum
+ 127.0.0.1:2181
+
EOF
echo "$CONFIG"
@@ -95,9 +128,10 @@ prepare_hadoop() {
check_dependencies
prepare_hadoop ${HADOOP_URL}
-HDFS_CONFIG=$(create_hdfs_config "127.0.0.1:8020")
-HDFS_CONFIG_CORE=$(create_hdfs_core_config "127.0.0.1:8020")
+HDFS_CONFIG=$(create_hdfs_config "127.0.0.1:8020" "test-cluster")
+HDFS_CONFIG_CORE=$(create_hdfs_core_config "127.0.0.1:8020" "test-cluster")
HDFS_CONFIG_MAPRED=$(create_hdfs_mapred_config "127.0.0.1:8021")
write_file ${HDFS_CONFIG_FILE} "${HDFS_CONFIG}"
write_file ${HDFS_CONFIG_FILE_CORE} "${HDFS_CONFIG_CORE}"
write_file ${HDFS_CONFIG_FILE_MAPRED} "${HDFS_CONFIG_MAPRED}"
+write_file ${HDFS_CONFIG_DATANODES} "localhost"
diff --git a/tests/setup.sh b/tests/setup.sh
index d40f7f0..e04dff0 100644
--- a/tests/setup.sh
+++ b/tests/setup.sh
@@ -22,7 +22,6 @@ compare_checksum() {
local r
CKSUM=$(sha512 -q "${1}")
if ! [ "$CKSUM" = "$2" ]; then
- printf "File has wrong checksum (%s)\n" "$1"
r=1
else
r=0
@@ -36,12 +35,14 @@ write_file() {
}
run() {
+ local pid
printf "Starting %s\n" "$2"
if $($1 > /dev/null 2>&1 &); then
printf "Started %s successfully\n" "$2"
+ pid=$!
else
printf "Failed to start %s\n" "$2"
- exit 1
+ pid="-1"
fi
- echo "$!"
+ echo "$pid"
}
diff --git a/tests/test_hbase_exporter.sh b/tests/test_hbase_exporter.sh
index a93aacf..711bfc7 100644
--- a/tests/test_hbase_exporter.sh
+++ b/tests/test_hbase_exporter.sh
@@ -4,7 +4,8 @@ HBASE_TIME_STARTUP=15
HBASE_EXPORTER_TIME_STARTUP=60
HBASE_CMD="./bin/hbase-daemon.sh --config conf start master"
HDFS_FORMAT=false
-HDFS_CMD="./hadoop/bin/hdfs --config hadoop/etc/hadoop namenode"
+HDFS_CMD_NAMENODE="./hadoop/bin/hdfs --config hadoop/etc/hadoop namenode"
+HDFS_CMD_DATANODE="./hadoop/bin/hdfs --config hadoop/etc/hadoop datanode"
HDFS_CMD_FORMAT="./hadoop/bin/hdfs --config hadoop/etc/hadoop namenode -format"
source setup.sh
@@ -31,7 +32,8 @@ setup_suite() {
r=run $HDFS_CMD_FORMAT "HDFS_FORMAT"
fi
- run "$HDFS_CMD" "HDFS"
+ run "$HDFS_CMD_NAMENODE" "HDFS Namenode"
+ run "$HDFS_CMD_DATANODE" "HDFS Datanode"
# Start HBase
cd hbase/ || exit