/**
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.hadoop.hdfs.tools;

import java.io.IOException;
import java.io.PrintStream;
import java.net.InetSocketAddress;
import java.security.PrivilegedExceptionAction;
import java.util.Arrays;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

import org.apache.hadoop.HadoopIllegalArgumentException;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.conf.Configured;
import org.apache.hadoop.hdfs.DFSConfigKeys;
import org.apache.hadoop.hdfs.DFSUtil;
import org.apache.hadoop.hdfs.DFSUtil.ConfiguredNNAddress;
import org.apache.hadoop.hdfs.HdfsConfiguration;
import org.apache.hadoop.security.UserGroupInformation;
import org.apache.hadoop.util.StringUtils;
import org.apache.hadoop.util.Tool;
import org.apache.hadoop.util.ToolRunner;

/**
 * Tool for getting configuration information from a configuration file.
 *
 * Adding more options:
 * <ul>
 * <li>
 * If adding a simple option to get a value corresponding to a key in the
 * configuration, use the regular {@link GetConf.CommandHandler}.
 * See {@link GetConf.Command#EXCLUDE_FILE} for an example.
 * </li>
 * <li>
 * If adding an option that does not return the value for a key, add
 * a subclass of {@link GetConf.CommandHandler} and set it up in
 * {@link GetConf.Command}.
 * See {@link GetConf.Command#NAMENODE} for an example.
 * For the new option, also add a map entry with the corresponding
 * {@link GetConf.CommandHandler} (a sketch follows this list).
 * </li>
 * </ul>
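 *
 * <p>
 * For illustration only, a hypothetical {@code -newOption} backed by the
 * made-up key {@code dfs.new.option.key} could be wired in roughly as
 * follows; neither the option nor the key exists in this class:
 * <pre>{@code
 * // In enum Command:
 * NEW_OPTION("-newOption", "gets the value of dfs.new.option.key."),
 *
 * // In the static initializer of Command:
 * map.put(StringUtils.toLowerCase(NEW_OPTION.getName()),
 *     new CommandHandler("dfs.new.option.key"));
 * }</pre>
 *
 * <p>
 * Example invocations (illustrative; output depends on the loaded
 * configuration):
 * <pre>
 * $ hdfs getconf -namenodes
 * $ hdfs getconf -confKey dfs.blocksize
 * </pre>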
 */
public class GetConf extends Configured implements Tool {
  private static final String DESCRIPTION = "hdfs getconf is a utility for "
      + "getting configuration information from the config file.\n";

  enum Command {
    NAMENODE("-namenodes", "gets list of namenodes in the cluster."),
    SECONDARY("-secondaryNameNodes",
        "gets list of secondary namenodes in the cluster."),
    BACKUP("-backupNodes", "gets list of backup nodes in the cluster."),
    INCLUDE_FILE("-includeFile",
        "gets the include file path that defines the datanodes " +
        "that can join the cluster."),
    EXCLUDE_FILE("-excludeFile",
        "gets the exclude file path that defines the datanodes " +
        "that need to be decommissioned."),
    NNRPCADDRESSES("-nnRpcAddresses", "gets the namenode rpc addresses."),
    CONFKEY("-confKey [key]", "gets a specific key from the configuration.");

    private static final Map<String, CommandHandler> map;
    static {
      map = new HashMap<String, CommandHandler>();
      map.put(StringUtils.toLowerCase(NAMENODE.getName()),
          new NameNodesCommandHandler());
      map.put(StringUtils.toLowerCase(SECONDARY.getName()),
          new SecondaryNameNodesCommandHandler());
      map.put(StringUtils.toLowerCase(BACKUP.getName()),
          new BackupNodesCommandHandler());
      map.put(StringUtils.toLowerCase(INCLUDE_FILE.getName()),
          new CommandHandler(DFSConfigKeys.DFS_HOSTS));
      map.put(StringUtils.toLowerCase(EXCLUDE_FILE.getName()),
          new CommandHandler(DFSConfigKeys.DFS_HOSTS_EXCLUDE));
      map.put(StringUtils.toLowerCase(NNRPCADDRESSES.getName()),
          new NNRpcAddressesCommandHandler());
      map.put(StringUtils.toLowerCase(CONFKEY.getName()),
          new PrintConfKeyCommandHandler());
    }

    private final String cmd;
    private final String description;

    Command(String cmd, String description) {
      this.cmd = cmd;
      this.description = description;
    }

    /** Returns the option name, e.g. "-confKey" for "-confKey [key]". */
    public String getName() {
      return cmd.split(" ")[0];
    }

    public String getUsage() {
      return cmd;
    }

    public String getDescription() {
      return description;
    }

    public static CommandHandler getHandler(String cmd) {
      return map.get(StringUtils.toLowerCase(cmd));
    }
  }

  static final String USAGE;
  static {
    HdfsConfiguration.init();

    /* Initialize USAGE based on Command values */
    StringBuilder usage = new StringBuilder(DESCRIPTION);
    usage.append("\nhdfs getconf \n");
    for (Command cmd : Command.values()) {
      usage.append("\t[" + cmd.getUsage() + "]\t\t\t" + cmd.getDescription()
          + "\n");
    }
    USAGE = usage.toString();
  }

  /**
   * Handler to return the value for the key corresponding to a
   * {@link Command}.
   */
  static class CommandHandler {
    String key; // Configuration key to lookup

    CommandHandler() {
      this(null);
    }

    CommandHandler(String key) {
      this.key = key;
    }

    final int doWork(GetConf tool, String[] args) {
      try {
        checkArgs(args);

        return doWorkInternal(tool, args);
      } catch (Exception e) {
        tool.printError(e.getMessage());
      }
      return -1;
    }

    protected void checkArgs(String[] args) {
      if (args.length > 0) {
        throw new HadoopIllegalArgumentException(
            "Did not expect argument: " + args[0]);
      }
    }

    /** Method to be overridden by subclasses for specific behavior. */
    int doWorkInternal(GetConf tool, String[] args) throws Exception {
      String value = tool.getConf().getTrimmed(key);
      if (value != null) {
        tool.printOut(value);
        return 0;
      }
      tool.printError("Configuration " + key + " is missing.");
      return -1;
    }
  }
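
  /**
   * A minimal sketch of a custom handler; hypothetical, for illustration
   * only, and not registered in {@link Command}'s map. It shows the
   * template-method contract: the final {@code doWork} validates the
   * arguments via {@code checkArgs}, then delegates to the overridden
   * {@code doWorkInternal}.
   */
  static class NameServicesCommandHandler extends CommandHandler {
    @Override
    int doWorkInternal(GetConf tool, String[] args) {
      // A subclass can compute its output instead of reading a single key.
      tool.printOut(tool.getConf().get(DFSConfigKeys.DFS_NAMESERVICES, ""));
      return 0;
    }
  }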

  /**
   * Handler for {@link Command#NAMENODE}.
   */
  static class NameNodesCommandHandler extends CommandHandler {
    @Override
    int doWorkInternal(GetConf tool, String[] args) throws IOException {
      tool.printMap(DFSUtil.getNNServiceRpcAddressesForCluster(tool.getConf()));
      return 0;
    }
  }

  /**
   * Handler for {@link Command#BACKUP}.
   */
  static class BackupNodesCommandHandler extends CommandHandler {
    @Override
    public int doWorkInternal(GetConf tool, String[] args) throws IOException {
      tool.printMap(DFSUtil.getBackupNodeAddresses(tool.getConf()));
      return 0;
    }
  }

  /**
   * Handler for {@link Command#SECONDARY}.
   */
  static class SecondaryNameNodesCommandHandler extends CommandHandler {
    @Override
    public int doWorkInternal(GetConf tool, String[] args) throws IOException {
      tool.printMap(DFSUtil.getSecondaryNameNodeAddresses(tool.getConf()));
      return 0;
    }
  }

  /**
   * Handler for {@link Command#NNRPCADDRESSES}.
   * If RPC addresses are defined in the configuration, print each one as
   * host:port on its own line. Otherwise, print an error and return a
   * non-zero status.
   */
  static class NNRpcAddressesCommandHandler extends CommandHandler {
    @Override
    public int doWorkInternal(GetConf tool, String[] args) throws IOException {
      Configuration config = tool.getConf();
      List<ConfiguredNNAddress> cnnlist = DFSUtil.flattenAddressMap(
          DFSUtil.getNNServiceRpcAddressesForCluster(config));
      if (!cnnlist.isEmpty()) {
        for (ConfiguredNNAddress cnn : cnnlist) {
          InetSocketAddress rpc = cnn.getAddress();
          tool.printOut(rpc.getHostName() + ":" + rpc.getPort());
        }
        return 0;
      }
      tool.printError("Did not get namenode service rpc addresses.");
      return -1;
    }
  }

  /**
   * Handler for {@link Command#CONFKEY}; prints the value of the key given
   * as the single argument.
   */
  static class PrintConfKeyCommandHandler extends CommandHandler {
    @Override
    protected void checkArgs(String[] args) {
      if (args.length != 1) {
        throw new HadoopIllegalArgumentException(
            "usage: " + Command.CONFKEY.getUsage());
      }
    }

    @Override
    int doWorkInternal(GetConf tool, String[] args) throws Exception {
      this.key = args[0];
      return super.doWorkInternal(tool, args);
    }
  }

  private final PrintStream out; // Stream for printing command output
  private final PrintStream err; // Stream for printing error

  GetConf(Configuration conf) {
    this(conf, System.out, System.err);
  }

  GetConf(Configuration conf, PrintStream out, PrintStream err) {
    super(conf);
    this.out = out;
    this.err = err;
  }

  void printError(String message) {
    err.println(message);
  }

  void printOut(String message) {
    out.println(message);
  }

  /** Prints the hostnames in the given address map, space-separated. */
  void printMap(Map<String, Map<String, InetSocketAddress>> map) {
    StringBuilder buffer = new StringBuilder();

    List<ConfiguredNNAddress> cnns = DFSUtil.flattenAddressMap(map);
    for (ConfiguredNNAddress cnn : cnns) {
      InetSocketAddress address = cnn.getAddress();
      if (buffer.length() > 0) {
        buffer.append(" ");
      }
      buffer.append(address.getHostName());
    }
    printOut(buffer.toString());
  }

  private void printUsage() {
    printError(USAGE);
  }

  /**
   * Main method that runs the tool for the given arguments.
   * @param args arguments
   * @return return status of the command
   */
  private int doWork(String[] args) {
    if (args.length >= 1) {
      CommandHandler handler = Command.getHandler(args[0]);
      if (handler != null) {
        return handler.doWork(this,
            Arrays.copyOfRange(args, 1, args.length));
      }
    }
    printUsage();
    return -1;
  }

  @Override
  public int run(final String[] args) throws Exception {
    try {
      // Run the command within the current user's security context.
      return UserGroupInformation.getCurrentUser().doAs(
          new PrivilegedExceptionAction<Integer>() {
            @Override
            public Integer run() throws Exception {
              return doWork(args);
            }
          });
    } catch (InterruptedException e) {
      throw new IOException(e);
    }
  }

  public static void main(String[] args) throws Exception {
    if (DFSUtil.parseHelpArgument(args, USAGE, System.out, true)) {
      System.exit(0);
    }

    int res = ToolRunner.run(new GetConf(new HdfsConfiguration()), args);
    System.exit(res);
  }
}