This is an old revision of the document!
Install the Hadoop ecosystem in single-node mode
Install OpenJDK on Debian
#!/usr/bin/env bash
# Install a single-node Hadoop from the Apache tarball on Debian.
# Requires: sudo privileges, network access to the Apache mirror.
set -euo pipefail

# Version is parameterized so upgrading means changing one line.
readonly HADOOP_VERSION=2.5.0
readonly MIRROR_URL="http://apache.mivzakim.net/hadoop/common/stable/hadoop-${HADOOP_VERSION}.tar.gz"

# Java runtime, ssh (Hadoop daemons talk over ssh even on one node) and rsync.
sudo apt-get install openjdk-7-jdk ssh rsync

# Fetch and unpack the distribution under /usr/local.
cd /tmp
wget "$MIRROR_URL"
tar xzf "hadoop-${HADOOP_VERSION}.tar.gz"
sudo mv "hadoop-${HADOOP_VERSION}" /usr/local/
# Versioned install dir, version-neutral symlink for configs/PATH.
sudo ln -s "/usr/local/hadoop-${HADOOP_VERSION}" /usr/local/hadoop

# Create the dedicated group and user FIRST — the original ran chown
# before these existed, so the chown failed.
sudo addgroup hadoop
sudo adduser --ingroup hadoop hduser
sudo chown -R hduser:hadoop /usr/local/hadoop*

# Passwordless ssh to localhost for the hduser account, needed by the
# Hadoop start scripts.
su - hduser
ssh-keygen -t rsa -P ""
cat "$HOME/.ssh/id_rsa.pub" >> "$HOME/.ssh/authorized_keys"
Edit ~/.bashrc
# Hadoop environment for ~/.bashrc (append to hduser's ~/.bashrc).

export HADOOP_HOME=/usr/local/hadoop

# Set JAVA_HOME (we will also configure JAVA_HOME directly for Hadoop later on).
export JAVA_HOME=/usr/lib/jvm/java-7-openjdk-amd64/jre

# Convenient aliases for running Hadoop-related commands.
# 'unalias' may fail if the alias is not yet defined; output is discarded.
unalias hfs &> /dev/null
alias hfs="hadoop fs"
unalias hls &> /dev/null
# BUG FIX: the original was `alias hls="fs -ls"`, which invokes a
# nonexistent `fs` command; it must go through `hadoop fs`.
alias hls="hadoop fs -ls"

# If you have LZO compression enabled in your Hadoop cluster and
# compress job outputs with LZOP (not covered in this tutorial):
# conveniently inspect an LZOP-compressed file from the command line:
#
#   $ lzohead /hdfs/path/to/lzop/compressed/file.lzo
#
# Requires the 'lzop' command to be installed.
lzohead() {
  # $1 - HDFS path to an .lzo file; quoted so paths with spaces work.
  hadoop fs -cat "$1" | lzop -dc | head -1000 | less
}

# Add the Hadoop bin/ directory to PATH.
export PATH=$PATH:$HADOOP_HOME/bin