File: minicluster.sh

Package: golang-github-colinmarc-hdfs 2.3.0-2
#!/bin/sh

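# HADOOP_HOME and NN_PORT can be overridden from the environment; by default
# the cluster lives under /tmp/hadoop with the namenode listening on port 9000.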
HADOOP_HOME=${HADOOP_HOME-"/tmp/hadoop"}
NN_PORT=${NN_PORT-"9000"}
HADOOP_NAMENODE="localhost:$NN_PORT"

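# Download and unpack a CDH 5 Hadoop distribution if one isn't already present.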
if [ ! -d "$HADOOP_HOME" ]; then
  mkdir -p "$HADOOP_HOME"

  echo "Downloading latest CDH to ${HADOOP_HOME}/hadoop.tar.gz"
  curl -o "${HADOOP_HOME}/hadoop.tar.gz" -L http://archive.cloudera.com/cdh5/cdh/5/hadoop-latest.tar.gz

  echo "Extracting ${HADOOP_HOME}/hadoop.tar.gz into $HADOOP_HOME"
  tar zxf "${HADOOP_HOME}/hadoop.tar.gz" --strip-components 1 -C "$HADOOP_HOME"
fi

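# Locate the jar that provides the 'minicluster' program, ignoring the
# -tests and -sources variants.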
MINICLUSTER_JAR=$(find "$HADOOP_HOME" -name "hadoop-mapreduce-client-jobclient*.jar" | grep -v tests | grep -v sources | head -1)
if [ ! -f "$MINICLUSTER_JAR" ]; then
  echo "Couldn't find minicluster jar!"
  exit 1
fi

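# Launch an HDFS-only minicluster in the background: namenode on $NN_PORT,
# three datanodes, no MapReduce, and a freshly formatted filesystem. Output
# goes to minicluster.log.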
echo "Starting minicluster..."
"$HADOOP_HOME"/bin/hadoop jar "$MINICLUSTER_JAR" minicluster -nnport "$NN_PORT" -datanodes 3 -nomr -format "$@" > minicluster.log 2>&1 &

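# Write a throwaway client config whose fs.defaultFS points at the
# minicluster's namenode.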
export HADOOP_CONF_DIR=$(mktemp -d)
cat > $HADOOP_CONF_DIR/core-site.xml <<EOF
<configuration>
  <property>
    <name>fs.defaultFS</name>
    <value>hdfs://$HADOOP_NAMENODE</value>
  </property>
</configuration>
EOF

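# Block until the namenode is up and has left safe mode.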
echo "Waiting for namenode to start up..."
"$HADOOP_HOME"/bin/hdfs dfsadmin -safemode wait

export HADOOP_FS="$HADOOP_HOME/bin/hadoop fs"
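# fixtures.sh (alongside this script) is expected to use $HADOOP_FS to stage
# test data on the freshly started cluster.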
./fixtures.sh

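# Tell the caller which environment variables to export so their shell (and
# the test suite) talk to this minicluster.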
echo "Please run the following commands:"
echo "export HADOOP_CONF_DIR='$HADOOP_CONF_DIR'"
echo "export HADOOP_FS='$HADOOP_HOME/bin/hadoop fs'"