/**
 * Returns a copy of the supplied options extended with the general
 * submission options: -jar (topology jar) and -conf (configuration file).
 *
 * @param opts existing options to carry over into the new set
 * @return a new {@link Options} containing the originals plus -jar and -conf
 */
private static Options buildGeneralOptions(Options opts) {
    Options result = new Options();
    // Carry over every pre-existing option unchanged.
    for (Object existing : opts.getOptions()) {
        result.addOption((Option) existing);
    }

    result.addOption(OptionBuilder
        .withArgName("path")
        .hasArg()
        .withDescription("topology jar of the submitted topology")
        .create("jar"));

    result.addOption(OptionBuilder
        .withArgName("configuration file")
        .hasArg()
        .withDescription("an application configuration file")
        .create("conf"));

    return result;
}
public BeelineOptionsProcessor() { options.addOption(OptionBuilder.hasArg().withArgName("url").create('u')); options.addOption(OptionBuilder.hasArg().withArgName("username").create('n')); options.addOption(OptionBuilder.hasArg().withArgName("password").create('p')); }
@Test
public void testGetParsedOptionValue() throws Exception {
    // -i is declared with a Number type; -f has no declared type.
    Options options = new Options();
    options.addOption(OptionBuilder.hasArg().withType(Number.class).create("i"));
    options.addOption(OptionBuilder.hasArg().create("f"));

    CommandLine cmd = new DefaultParser().parse(options, new String[] { "-i", "123", "-f", "foo" });

    // The typed option is converted to a Number; the untyped one stays a raw String.
    assertEquals(123, ((Number) cmd.getParsedOptionValue("i")).intValue());
    assertEquals("foo", cmd.getParsedOptionValue("f"));
}
/**
 * Returns a copy of the supplied options extended with the rolling-upgrade
 * options: -n (workers per batch), -p (component id), -w (worker list).
 *
 * @param opts existing options to carry over into the new set
 * @return a new {@link Options} containing the originals plus -n, -p and -w
 */
private static Options buildGeneralOptions(Options opts) {
    Options result = new Options();
    // Carry over every pre-existing option unchanged.
    for (Object existing : opts.getOptions()) {
        result.addOption((Option) existing);
    }

    result.addOption(OptionBuilder
        .withArgName("worker num per batch")
        .hasArg()
        .withDescription("number of workers to upgrade")
        .create("n"));

    result.addOption(OptionBuilder
        .withArgName("component to upgrade")
        .hasArg()
        .withDescription("component id to upgrade, note that only one component is allowed at a time")
        .create("p"));

    result.addOption(OptionBuilder
        .withArgName("workers to upgrade")
        .hasArg()
        .withDescription("workers to upgrade, in the format: host1:port1,host2:port2,...")
        .create("w"));

    return result;
}
@Test public void test11456() throws Exception { // Posix Options options = new Options(); options.addOption( OptionBuilder.hasOptionalArg().create( 'a' ) ); options.addOption( OptionBuilder.hasArg().create( 'b' ) ); String[] args = new String[] { "-a", "-bvalue" }; CommandLineParser parser = new PosixParser(); CommandLine cmd = parser.parse( options, args ); assertEquals( cmd.getOptionValue( 'b' ), "value" ); // GNU options = new Options(); options.addOption( OptionBuilder.hasOptionalArg().create( 'a' ) ); options.addOption( OptionBuilder.hasArg().create( 'b' ) ); args = new String[] { "-a", "-b", "value" }; parser = new GnuParser(); cmd = parser.parse( options, args ); assertEquals( cmd.getOptionValue( 'b' ), "value" ); }
@SuppressWarnings("static-access") HiveMetastoreCli(Configuration configuration) { super("hivemetastore", true); this.port = MetastoreConf.getIntVar(configuration, ConfVars.SERVER_PORT); // -p port OPTIONS.addOption(OptionBuilder .hasArg() .withArgName("port") .withDescription("Hive Metastore port number, default:" + this.port) .create('p')); }
@Test
public void testDoubleDash2() throws Exception {
    Options options = new Options();
    options.addOption(OptionBuilder.hasArg().create('n'));
    options.addOption(OptionBuilder.create('m'));

    // "--" terminates option parsing, so -n is left without its required argument.
    try {
        parser.parse(options, new String[] { "-n", "--", "-m" });
        fail("MissingArgumentException not thrown for option -n");
    } catch (MissingArgumentException e) {
        assertNotNull("option null", e.getOption());
        assertEquals("n", e.getOption().getOpt());
    }
}
/**
 * Parses the tool's command line.
 *
 * @param args raw program arguments
 * @return the parsed command line (-f/--file and -o/--print_offsets_only)
 * @throws ParseException if the arguments do not match the declared options
 */
private static CommandLine parseArgs(String[] args) throws ParseException {
    Options options = new Options();

    // -f/--file <sequence_file_name>: the sequence file to read.
    Option file = OptionBuilder
        .withLongOpt("file")
        .withDescription("sequence file to read")
        .hasArg()
        .withArgName("<sequence_file_name>")
        .withType(String.class)
        .create("f");
    options.addOption(file);

    // -o: boolean flag, no argument.
    options.addOption("o", "print_offsets_only", false,
        "whether to print only offsets ignoring the message payload");

    return new GnuParser().parse(options, args);
}
/**
 * Builds the processor and the option sets for each crypto command:
 * create-key (keyName required, bitLength optional), delete-key
 * (keyName required) and create-zone (keyName and path required).
 */
public CryptoProcessor(HadoopShims.HdfsEncryptionShim encryptionShim, HiveConf conf) {
    this.encryptionShim = encryptionShim;
    this.conf = conf;

    CREATE_KEY_OPTIONS = new Options();
    CREATE_KEY_OPTIONS.addOption(OptionBuilder.hasArg().withLongOpt("keyName").isRequired().create());
    // bitLength is deliberately not marked required.
    CREATE_KEY_OPTIONS.addOption(OptionBuilder.hasArg().withLongOpt("bitLength").create());

    DELETE_KEY_OPTIONS = new Options();
    DELETE_KEY_OPTIONS.addOption(OptionBuilder.hasArg().withLongOpt("keyName").isRequired().create());

    CREATE_ZONE_OPTIONS = new Options();
    CREATE_ZONE_OPTIONS.addOption(OptionBuilder.hasArg().withLongOpt("keyName").isRequired().create());
    CREATE_ZONE_OPTIONS.addOption(OptionBuilder.hasArg().withLongOpt("path").isRequired().create());
}
static Options buildGeneralOptions(Options opts) { Options r = new Options(); for (Object o : opts.getOptions()) r.addOption((Option) o); Option libjars = OptionBuilder.withArgName("paths").hasArg().withDescription("comma separated jars to be used by the submitted topology").create("libjars"); r.addOption(libjars); optionProcessors.put("libjars", new LibjarsProcessor()); Option conf = OptionBuilder.withArgName("configuration file").hasArg().withDescription("an application configuration file").create("conf"); r.addOption(conf); optionProcessors.put("conf", new ConfFileProcessor()); // Must come after `conf': this option is of higher priority Option extraConfig = OptionBuilder.withArgName("D").hasArg().withDescription("extra configurations (preserving types)").create("D"); r.addOption(extraConfig); optionProcessors.put("D", new ExtraConfigProcessor()); return r; }
/**
 * Constructs the crypto command processor and prepares one {@link Options}
 * set per supported command.
 */
public CryptoProcessor(HadoopShims.HdfsEncryptionShim encryptionShim, HiveConf conf) {
    this.encryptionShim = encryptionShim;
    this.conf = conf;

    // create-key: --keyName is required, --bitLength may be omitted.
    CREATE_KEY_OPTIONS = new Options();
    CREATE_KEY_OPTIONS.addOption(
        OptionBuilder.hasArg().withLongOpt("keyName").isRequired().create());
    CREATE_KEY_OPTIONS.addOption(
        OptionBuilder.hasArg().withLongOpt("bitLength").create());

    // delete-key: --keyName is required.
    DELETE_KEY_OPTIONS = new Options();
    DELETE_KEY_OPTIONS.addOption(
        OptionBuilder.hasArg().withLongOpt("keyName").isRequired().create());

    // create-zone: both --keyName and --path are required.
    CREATE_ZONE_OPTIONS = new Options();
    CREATE_ZONE_OPTIONS.addOption(
        OptionBuilder.hasArg().withLongOpt("keyName").isRequired().create());
    CREATE_ZONE_OPTIONS.addOption(
        OptionBuilder.hasArg().withLongOpt("path").isRequired().create());
}
/**
 * Instantiates a new CLI options processor, registering the supported
 * command-line options: -e, -h, -p, -c and -H/--help.
 */
@SuppressWarnings("static-access")
public OptionsProcessor() {
    // -e 'quoted-query-string': execute a command passed on the command line.
    options.addOption(OptionBuilder
        .hasArg()
        .withArgName("quoted-command-string")
        .withDescription("Remote command line")
        .create('e'));

    // -h hostname/ipaddress of the Thrift server.
    options.addOption(OptionBuilder
        .hasArg()
        .withArgName("hostname")
        .withDescription("connecting to Thrift Server on remote host")
        .create('h'));

    // -p port of the Thrift server.
    options.addOption(OptionBuilder
        .hasArg()
        .withArgName("port")
        .withDescription("connecting to Thrift Server on port number")
        .create('p'));

    // -c 'thrift-config': property file for the Thrift client.
    options.addOption(OptionBuilder
        .hasArg()
        .withArgName("thrift-config")
        .withDescription("Thrift property file")
        .create('c'));

    // -H / --help: flag only, no argument.
    options.addOption(new Option("H", "help", false, "Print help information"));
}
/**
 * Builds the tool's options: -c/--check-only, -r/--recover,
 * --backup-path &lt;path&gt; and -h/--help.
 *
 * @return the populated {@link Options}
 */
static Options createOptions() {
    Options result = new Options();

    result.addOption(OptionBuilder
        .withLongOpt("check-only")
        .withDescription("Check acid orc file for valid acid key index and exit without fixing")
        .create('c'));

    result.addOption(OptionBuilder
        .withLongOpt("recover")
        .withDescription("Fix the acid key index for acid orc file if it requires fixing")
        .create('r'));

    // Long-opt only; takes the backup directory as its argument.
    result.addOption(OptionBuilder
        .withLongOpt("backup-path")
        .withDescription("specify a backup path to store the corrupted files (default: /tmp)")
        .hasArg()
        .create());

    result.addOption(OptionBuilder
        .withLongOpt("help")
        .withDescription("print help message")
        .create('h'));

    return result;
}
static Options createOptions() { Options result = new Options(); result.addOption(OptionBuilder .withLongOpt("location") .withDescription("HS2 url") .hasArg() .create('l')); result.addOption(OptionBuilder .withLongOpt("user") .withDescription("user name") .hasArg() .create('u')); result.addOption(OptionBuilder .withLongOpt("pwd") .withDescription("password") .hasArg() .create('p')); .hasArg() .withValueSeparator() .hasArgs(2) .withArgName("property=value") .hasArg(false)
/**
 * Creates the command line options for the VectAlign tool.
 *
 * @return the populated {@link Options}
 */
private static Options initCommandLineOptions() {
    Options options = new Options();

    // --gui: flag, launches the graphical interface.
    options.addOption(OptionBuilder
        .withLongOpt("gui")
        .withDescription("Start VectAlign GUI")
        .create(OPTION_GUI));

    // --start: the source VectorDrawable path.
    options.addOption(OptionBuilder
        .withLongOpt("start")
        .withDescription("Starting VectorDrawable path (\"string\", txt file or SVG file)")
        .hasArg()
        .withArgName("\"string\"|txt_file|svg_file")
        .create(OPTION_FROM));

    // --end: the target VectorDrawable path.
    options.addOption(OptionBuilder
        .withLongOpt("end")
        .withDescription("Ending VectorDrawable path (\"string\", txt file or SVG file)")
        .hasArg()
        .withArgName("\"string\"|txt_file|svg_file")
        .create(OPTION_TO));

    // --mode: which aligning technique to use.
    options.addOption(OptionBuilder
        .withLongOpt("mode")
        .withDescription("Aligning technique (default is BASE)")
        .hasArg()
        .withArgName("BASE|LINEAR|SUB_BASE|SUB_LINEAR")
        .create(OPTION_MODE));

    // --version: flag, prints the application version.
    options.addOption(OptionBuilder
        .withLongOpt("version")
        .withDescription("Print the version of the application")
        .create(OPTION_VERSION));

    // --help: flag, no description.
    options.addOption(OptionBuilder.withLongOpt("help").create(OPTION_HELP));

    return options;
}
// NOTE(review): fragment of a larger method — the enclosing signature and the
// closing braces are outside this view; code left byte-identical.
// OptionBuilder holds mutable static state, so construction is serialized on
// the OptionBuilder class itself to avoid interleaved builders.
synchronized (OptionBuilder.class) {
    Options options = new Options();
    // -<ARG_CONF_SHORT> / --<ARG_CONF> <file>: application configuration file.
    Option oconf = OptionBuilder.withArgName("configuration file")
        .hasArg()
        .withDescription("specify an application configuration file")
        .withLongOpt(ARG_CONF)
        .create(ARG_CONF_SHORT);
    // -<ARG_CONFCLASS_SHORT> / --<ARG_CONFCLASS> <classname>: Configuration subclass.
    Option confclass = OptionBuilder.withArgName("configuration classname")
        .hasArg()
        .withDescription(
            "Classname of a Hadoop Configuration subclass to load")
        .withLongOpt(ARG_CONFCLASS)
        .create(ARG_CONFCLASS_SHORT);
    // -D property=value: ad-hoc property override.
    Option property = OptionBuilder.withArgName("property=value")
        .hasArg()
        .withDescription("use value for given property")
        .create('D');
    options.addOption(oconf);
    options.addOption(property);
    options.addOption(confclass);
    return options;
// NOTE(review): this constructor is truncated in the visible source (no closing
// brace); code left byte-identical, only comments added.
// Registers the CLI options: --database, -e, -f, -i, --hiveconf and -d/--define.
public CliOptionsProcessor() {
    // --database <databasename>: long-opt only (no short flag).
    options.addOption(OptionBuilder.hasArg().withArgName("databasename").withLongOpt("database")
        .withDescription("Specify the database to use").create());
    // -e 'quoted-query-string': run SQL passed on the command line.
    options.addOption(OptionBuilder.hasArg().withArgName("quoted-query-string").withDescription
        ("SQL from command line").create('e'));
    // -f <filename>: run SQL from a file.
    options.addOption(OptionBuilder.hasArg().withArgName("filename").withDescription("SQL from " +
        "files").create('f'));
    // -i <filename>: initialization SQL executed before the session starts.
    options.addOption(OptionBuilder.hasArg().withArgName("filename").withDescription
        ("Initialization SQL file").create('i'));
    // --hiveconf property=value: two args split on '=' (value separator).
    options.addOption(OptionBuilder.withValueSeparator().hasArgs(2).withArgName("property=value")
        .withLongOpt("hiveconf").withDescription("Use value for given property").create());
    // -d/--define key=value: variable substitution, same key=value splitting.
    options.addOption(OptionBuilder.withValueSeparator().hasArgs(2).withArgName("key=value")
        .withLongOpt("define").withDescription("Variable substitution to apply to Hive commands. e" +
        ".g. -d A=B or --define A=B").create('d'));
/**
 * Parses the tool's command line.
 *
 * @param args raw program arguments
 * @return the parsed command line (-c/--command, -t/--topic, -p/--partition)
 * @throws ParseException if the arguments do not match the declared options
 */
private static CommandLine parseArgs(String[] args) throws ParseException {
    Options options = new Options();

    // -c/--command <command>: which operation to perform.
    Option command = OptionBuilder
        .withLongOpt("command")
        .withDescription("command name. One of \"delete_committed_offsets\"")
        .hasArg()
        .withArgName("<command>")
        .withType(String.class)
        .create("c");
    options.addOption(command);

    // -t/--topic <topic>: the topic to operate on.
    Option topic = OptionBuilder
        .withLongOpt("topic")
        .withDescription("topic whose offset should be read")
        .hasArg()
        .withArgName("<topic>")
        .withType(String.class)
        .create("t");
    options.addOption(topic);

    // -p/--partition <partition>: numeric partition id.
    Option partition = OptionBuilder
        .withLongOpt("partition")
        .withDescription("kafka partition whose offset should be read")
        .hasArg()
        .withArgName("<partition>")
        .withType(Number.class)
        .create("p");
    options.addOption(partition);

    return new GnuParser().parse(options, args);
}