--core-site.xml

 
   fs.defaultFS                                  (注:Hadoop 2.x 中 fs.default.name 已废弃,新名称为 fs.defaultFS)
   hdfs://hadoop1:54321
 
 
 hadoop.tmp.dir
 /data/hdfs/tmp
 



 
--hdfs-site.xml

 
   dfs.namenode.name.dir                         (注:Hadoop 2.x 中 dfs.name.dir 已废弃;路径应写成 URI 形式,否则格式化时会出现 "Path ... should be specified as a URI" 的 WARN)
   file:///data/hdfs/name
 


 
   dfs.datanode.data.dir                         (注:Hadoop 2.x 中 dfs.data.dir 已废弃;路径建议写成 URI 形式)
   file:///data/hdfs/data
 


 
   fs.checkpoint.dir
   /data/hdfs/namesecondary
 



--mapred-site.xml    (注:Hadoop 2.x 下 MapReduce 由 YARN 调度,需另设 mapreduce.framework.name=yarn;mapred.job.tracker 等是 1.x JobTracker/TaskTracker 的配置,YARN 模式下不生效)

 
   mapred.job.tracker
   hadoop1:54320
 


 
   mapred.local.dir
   /data/mapred/local
 


 
   mapred.system.dir
   /data/mapred/system
 

 
   mapred.tasktracker.map.tasks.maximum
   7
 


 
   mapred.tasktracker.reduce.tasks.maximum
   7
 

 
   mapred.child.java.opts
   -Xmx400m
 





--yarn-site.xml
  
   
  
yarn.resourcemanager.address  
hadoop1:8080
  
   
  
yarn.resourcemanager.scheduler.address  
hadoop1:8081  
  
   
  
yarn.resourcemanager.resource-tracker.address  
hadoop1:8082  
  
   
  
yarn.nodemanager.aux-services  
mapreduce_shuffle   
  
   
  
yarn.nodemanager.aux-services.mapreduce_shuffle.class  
org.apache.hadoop.mapred.ShuffleHandler  
     

  



======================================================================================
=========================格式化hdfs===================================================

[hadoop@hadoop1 sbin]$ hdfs namenode -format                                                 //hadoop namenode -format  语法被废弃
14/04/21 20:43:41 INFO namenode.NameNode: STARTUP_MSG: 
/************************************************************
STARTUP_MSG: Starting NameNode
STARTUP_MSG:   host = hadoop1/192.168.0.201
STARTUP_MSG:   args = [-format]
STARTUP_MSG:   version = 2.2.0
....

************************************************************/
14/04/21 20:43:41 INFO namenode.NameNode: registered UNIX signal handlers for [TERM, HUP, INT]
14/04/21 20:43:41 WARN util.NativeCodeLoader: Unable to load native-hadoop library for your platform... using builtin-java classes where applicable
14/04/21 20:43:41 WARN common.Util: Path /data/hdfs/name should be specified as a URI in configuration files. Please update hdfs configuration.
14/04/21 20:43:41 WARN common.Util: Path /data/hdfs/name should be specified as a URI in configuration files. Please update hdfs configuration.
Formatting using clusterid: CID-99c8a03a-d159-45f3-9493-5737343c5209
14/04/21 20:43:41 INFO namenode.HostFileManager: read includes:
HostSet(
)
14/04/21 20:43:41 INFO namenode.HostFileManager: read excludes:
HostSet(
)
14/04/21 20:43:41 INFO blockmanagement.DatanodeManager: dfs.block.invalidate.limit=1000
14/04/21 20:43:41 INFO util.GSet: Computing capacity for map BlocksMap
14/04/21 20:43:41 INFO util.GSet: VM type       = 32-bit
14/04/21 20:43:41 INFO util.GSet: 2.0% max memory = 966.7 MB
14/04/21 20:43:41 INFO util.GSet: capacity      = 2^22 = 4194304 entries
14/04/21 20:43:43 INFO blockmanagement.BlockManager: dfs.block.access.token.enable=false
14/04/21 20:43:43 INFO blockmanagement.BlockManager: defaultReplication         = 3
14/04/21 20:43:43 INFO blockmanagement.BlockManager: maxReplication             = 512
14/04/21 20:43:43 INFO blockmanagement.BlockManager: minReplication             = 1
14/04/21 20:43:43 INFO blockmanagement.BlockManager: maxReplicationStreams      = 2
14/04/21 20:43:43 INFO blockmanagement.BlockManager: shouldCheckForEnoughRacks  = false
14/04/21 20:43:43 INFO blockmanagement.BlockManager: replicationRecheckInterval = 3000
14/04/21 20:43:43 INFO blockmanagement.BlockManager: encryptDataTransfer        = false
14/04/21 20:43:43 INFO namenode.FSNamesystem: fsOwner             = hadoop (auth:SIMPLE)
14/04/21 20:43:43 INFO namenode.FSNamesystem: supergroup          = supergroup
14/04/21 20:43:43 INFO namenode.FSNamesystem: isPermissionEnabled = true
14/04/21 20:43:43 INFO namenode.FSNamesystem: HA Enabled: false
14/04/21 20:43:43 INFO namenode.FSNamesystem: Append Enabled: true
14/04/21 20:43:43 INFO util.GSet: Computing capacity for map INodeMap
14/04/21 20:43:43 INFO util.GSet: VM type       = 32-bit
14/04/21 20:43:43 INFO util.GSet: 1.0% max memory = 966.7 MB
14/04/21 20:43:43 INFO util.GSet: capacity      = 2^21 = 2097152 entries
14/04/21 20:43:44 INFO namenode.NameNode: Caching file names occuring more than 10 times
14/04/21 20:43:44 INFO namenode.FSNamesystem: dfs.namenode.safemode.threshold-pct = 0.9990000128746033
14/04/21 20:43:44 INFO namenode.FSNamesystem: dfs.namenode.safemode.min.datanodes = 0
14/04/21 20:43:44 INFO namenode.FSNamesystem: dfs.namenode.safemode.extension     = 30000
14/04/21 20:43:44 INFO namenode.FSNamesystem: Retry cache on namenode is enabled
14/04/21 20:43:44 INFO namenode.FSNamesystem: Retry cache will use 0.03 of total heap and retry cache entry expiry time is 600000 millis
14/04/21 20:43:44 INFO util.GSet: Computing capacity for map Namenode Retry Cache
14/04/21 20:43:44 INFO util.GSet: VM type       = 32-bit
14/04/21 20:43:44 INFO util.GSet: 0.029999999329447746% max memory = 966.7 MB
14/04/21 20:43:44 INFO util.GSet: capacity      = 2^16 = 65536 entries

Re-format filesystem in Storage Directory /data/hdfs/name ? (Y or N) Y
14/04/21 20:46:18 INFO common.Storage: Storage directory /data/hdfs/name has been successfully formatted.
14/04/21 20:46:19 INFO namenode.FSImage: Saving image file /data/hdfs/name/current/fsimage.ckpt_0000000000000000000 using no compression
14/04/21 20:46:19 INFO namenode.FSImage: Image file /data/hdfs/name/current/fsimage.ckpt_0000000000000000000 of size 198 bytes saved in 0 seconds.
14/04/21 20:46:19 INFO namenode.NNStorageRetentionManager: Going to retain 1 images with txid >= 0
14/04/21 20:46:19 INFO util.ExitUtil: Exiting with status 0
14/04/21 20:46:19 INFO namenode.NameNode: SHUTDOWN_MSG: 
/************************************************************
SHUTDOWN_MSG: Shutting down NameNode at hadoop1/192.168.0.201
************************************************************/


===============================================================================================
===========================================启动hdfs============================================



[hadoop@hadoop1 hadoop]$ start-dfs.sh 
14/04/21 20:54:26 WARN util.NativeCodeLoader: Unable to load native-hadoop library for your platform... using builtin-java classes where applicable
Starting namenodes on [hadoop1]
hadoop1: Error: JAVA_HOME is not set and could not be found.
hadoop2: Error: JAVA_HOME is not set and could not be found.
Starting secondary namenodes [0.0.0.0]

解决办法,设置 hadoop-env.sh 的 JAVA_HOME 为新安装的 jdk 的目录
JAVA_HOME=/opt/jdk




[hadoop@hadoop1 hadoop]$ start-dfs.sh 
14/04/21 21:50:45 DEBUG lib.MutableMetricsFactory: field org.apache.hadoop.metrics2.lib.MutableRate org.apache.hadoop.security.UserGroupInformation$UgiMetrics.loginSuccess with annotation @org.apache.hadoop.metrics2.annotation.Metric(valueName=Time, value=[Rate of successful kerberos logins and latency (milliseconds)], about=, always=false, type=DEFAULT, sampleName=Ops)
14/04/21 21:50:45 DEBUG lib.MutableMetricsFactory: field org.apache.hadoop.metrics2.lib.MutableRate org.apache.hadoop.security.UserGroupInformation$UgiMetrics.loginFailure with annotation @org.apache.hadoop.metrics2.annotation.Metric(valueName=Time, value=[Rate of failed kerberos logins and latency (milliseconds)], about=, always=false, type=DEFAULT, sampleName=Ops)
14/04/21 21:50:45 DEBUG impl.MetricsSystemImpl: UgiMetrics, User and group related metrics
14/04/21 21:50:45 DEBUG security.Groups:  Creating new Groups object
14/04/21 21:50:45 DEBUG util.NativeCodeLoader: Trying to load the custom-built native-hadoop library...
14/04/21 21:50:45 DEBUG util.NativeCodeLoader: Failed to load native-hadoop with error: java.lang.UnsatisfiedLinkError: /usr/local/hadoop2.2/lib/native/libhadoop.so.1.0.0: /lib/libc.so.6: version `GLIBC_2.6' not found (required by /usr/local/hadoop2.2/lib/native/libhadoop.so.1.0.0)
14/04/21 21:50:45 DEBUG util.NativeCodeLoader: java.library.path=/usr/local/hadoop2.2/lib/native
14/04/21 21:50:45 WARN util.NativeCodeLoader: Unable to load native-hadoop library for your platform... using builtin-java classes where applicable
14/04/21 21:50:45 DEBUG security.JniBasedUnixGroupsMappingWithFallback: Falling back to shell based
14/04/21 21:50:45 DEBUG security.JniBasedUnixGroupsMappingWithFallback: Group mapping impl=org.apache.hadoop.security.ShellBasedUnixGroupsMapping
14/04/21 21:50:45 DEBUG security.Groups: Group mapping impl=org.apache.hadoop.security.JniBasedUnixGroupsMappingWithFallback; cacheTimeout=300000
14/04/21 21:50:45 DEBUG security.UserGroupInformation: hadoop login
14/04/21 21:50:45 DEBUG security.UserGroupInformation: hadoop login commit
14/04/21 21:50:45 DEBUG security.UserGroupInformation: using local user:UnixPrincipal: hadoop
14/04/21 21:50:45 DEBUG security.UserGroupInformation: UGI loginUser:hadoop (auth:SIMPLE)
14/04/21 21:50:45 DEBUG security.UserGroupInformation: PrivilegedAction as:hadoop (auth:SIMPLE) from:org.apache.hadoop.hdfs.tools.GetConf.run(GetConf.java:314)
14/04/21 21:50:45 DEBUG impl.MetricsSystemImpl: StartupProgress, NameNode startup progress
Starting namenodes on [hadoop1]
hadoop1: starting namenode, logging to /usr/local/hadoop2.2/logs/hadoop-hadoop-namenode-hadoop1.out
hadoop2: starting datanode, logging to /usr/local/hadoop2.2/logs/hadoop-hadoop-datanode-hadoop2.out
Starting secondary namenodes [0.0.0.0]
The authenticity of host '0.0.0.0 (0.0.0.0)' can't be established.
RSA key fingerprint is a1:60:f5:71:da:5a:ca:75:f8:e5:8a:d5:eb:84:95:60.
Are you sure you want to continue connecting (yes/no)? yes
0.0.0.0: Warning: Permanently added '0.0.0.0' (RSA) to the list of known hosts.
0.0.0.0: starting secondarynamenode, logging to /usr/local/hadoop2.2/logs/hadoop-hadoop-secondarynamenode-hadoop1.out
14/04/21 21:52:55 DEBUG lib.MutableMetricsFactory: field org.apache.hadoop.metrics2.lib.MutableRate org.apache.hadoop.security.UserGroupInformation$UgiMetrics.loginSuccess with annotation @org.apache.hadoop.metrics2.annotation.Metric(valueName=Time, value=[Rate of successful kerberos logins and latency (milliseconds)], about=, always=false, type=DEFAULT, sampleName=Ops)
14/04/21 21:52:55 DEBUG lib.MutableMetricsFactory: field org.apache.hadoop.metrics2.lib.MutableRate org.apache.hadoop.security.UserGroupInformation$UgiMetrics.loginFailure with annotation @org.apache.hadoop.metrics2.annotation.Metric(valueName=Time, value=[Rate of failed kerberos logins and latency (milliseconds)], about=, always=false, type=DEFAULT, sampleName=Ops)
14/04/21 21:52:55 DEBUG impl.MetricsSystemImpl: UgiMetrics, User and group related metrics
14/04/21 21:52:56 DEBUG security.Groups:  Creating new Groups object
14/04/21 21:52:56 DEBUG util.NativeCodeLoader: Trying to load the custom-built native-hadoop library...
14/04/21 21:52:56 DEBUG util.NativeCodeLoader: Failed to load native-hadoop with error: java.lang.UnsatisfiedLinkError: /usr/local/hadoop2.2/lib/native/libhadoop.so.1.0.0: /lib/libc.so.6: version `GLIBC_2.6' not found (required by /usr/local/hadoop2.2/lib/native/libhadoop.so.1.0.0)
14/04/21 21:52:56 DEBUG util.NativeCodeLoader: java.library.path=/usr/local/hadoop2.2/lib/native
14/04/21 21:52:56 WARN util.NativeCodeLoader: Unable to load native-hadoop library for your platform... using builtin-java classes where applicable
14/04/21 21:52:56 DEBUG security.JniBasedUnixGroupsMappingWithFallback: Falling back to shell based
14/04/21 21:52:56 DEBUG security.JniBasedUnixGroupsMappingWithFallback: Group mapping impl=org.apache.hadoop.security.ShellBasedUnixGroupsMapping
14/04/21 21:52:56 DEBUG security.Groups: Group mapping impl=org.apache.hadoop.security.JniBasedUnixGroupsMappingWithFallback; cacheTimeout=300000
14/04/21 21:52:56 DEBUG security.UserGroupInformation: hadoop login
14/04/21 21:52:56 DEBUG security.UserGroupInformation: hadoop login commit
14/04/21 21:52:56 DEBUG security.UserGroupInformation: using local user:UnixPrincipal: hadoop
14/04/21 21:52:56 DEBUG security.UserGroupInformation: UGI loginUser:hadoop (auth:SIMPLE)
14/04/21 21:52:56 DEBUG security.UserGroupInformation: PrivilegedAction as:hadoop (auth:SIMPLE) from:org.apache.hadoop.hdfs.tools.GetConf.run(GetConf.java:314)