#!/bin/bash
# NOTE(review): file was captured with a "postgresql.sh" caption, but the
# script manages Hadoop (HDFS/YARN) services, not PostgreSQL — confirm name.
set -e
  3. dever_yarn()
  4. {
  5. $HADOOP_HOME/sbin/start-yarn.sh
  6. }
  7. dever_dfs()
  8. {
  9. $HADOOP_HOME/sbin/start-dfs.sh
  10. }
  11. dever_start()
  12. {
  13. dever_dfs
  14. echo -e "\n"
  15. dever_yarn
  16. echo -e "\n"
  17. }
  18. dever_wordcount()
  19. {
  20. mkdir input
  21. echo "Hello Docker" >input/file2.txt
  22. echo "Hello Hadoop" >input/file1.txt
  23. # create input directory on HDFS
  24. hadoop fs -mkdir -p input
  25. # put input files to HDFS
  26. hdfs dfs -put ./input/* input
  27. # run wordcount
  28. hadoop jar $HADOOP_HOME/share/hadoop/mapreduce/sources/hadoop-mapreduce-examples-2.7.2-sources.jar org.apache.hadoop.examples.WordCount input output
  29. # print the input files
  30. echo -e "\ninput file1.txt:"
  31. hdfs dfs -cat input/file1.txt
  32. echo -e "\ninput file2.txt:"
  33. hdfs dfs -cat input/file2.txt
  34. # print the output of wordcount
  35. echo -e "\nwordcount output:"
  36. hdfs dfs -cat output/part-r-00000
  37. }
  38. dever_ssh()
  39. {
  40. /usr/sbin/sshd
  41. }
  42. if [ "$1" = 'start' ]; then
  43. dever_start
  44. fi
  45. if [ "$1" = 'wordcount' ]; then
  46. dever_wordcount
  47. fi
  48. if [ "$1" = 'ssh' ]; then
  49. dever_ssh
  50. fi
  51. exec $0