Parsing args parameters
2022-07-23 02:32:00 【南风知我意丿】
Pain point
def main(args: Array[String]): Unit
Normally we read the arguments passed to a program positionally, via args(0), args(1), and so on. With only a few arguments that is tolerable, but once there are many it gets messy, and after a while nobody remembers what each position means.
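For example, positional access tends to look like the sketch below (a minimal illustration only; the variable names are borrowed from the parameters used later in this post):

def main(args: Array[String]): Unit = {
  // each index is only meaningful if you remember the calling convention
  val topic = args(0)
  val groupId = args(1)
  val maxRatePerPartition = args(2).toInt
  val interval = args(3).toInt
  // adding, removing or reordering a parameter silently breaks every launch script
}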
The effect we are after is something like the hadoop fs help output:
Usage: hadoop fs [generic options]
[-appendToFile <localsrc> ... <dst>]
[-cat [-ignoreCrc] <src> ...]
[-checksum <src> ...]
[-chgrp [-R] GROUP PATH...]
[-chmod [-R] <MODE[,MODE]... | OCTALMODE> PATH...]
[-chown [-R] [OWNER][:[GROUP]] PATH...]
[-copyFromLocal [-f] [-p] [-l] [-d] [-t <thread count>] <localsrc> ... <dst>]
[-copyToLocal [-f] [-p] [-ignoreCrc] [-crc] <src> ... <localdst>]
[-count [-q] [-h] [-v] [-t [<storage type>]] [-u] [-x] [-e] <path> ...]
[-cp [-f] [-p | -p[topax]] [-d] <src> ... <dst>]
[-createSnapshot <snapshotDir> [<snapshotName>]]
[-deleteSnapshot <snapshotDir> <snapshotName>]
[-df [-h] [<path> ...]]
[-du [-s] [-h] [-v] [-x] <path> ...]
[-expunge]
Implementation (Scala)
// Option parsing below uses Apache Commons CLI.
import org.apache.commons.cli.{CommandLine, HelpFormatter, Options, PosixParser}

// Note: logger and ERROR_EXIT_STATUS are assumed to be defined on the enclosing
// object (the code below is a fragment of that object, e.g. Constant).
private val options: Options = new Options
private val option_h_desc: String =
  """ |print help info
      |usage:
      | java -cp jarLocation fullClassName [-h] || [-t]&&[-g]&&[-maxR] 1> applicationName.log 2>&1 &
      |
      |data flow:
      | kafka => Spark Streaming
      | RedisStateStorePerDay
      |
      |programs:
      | SampleFirstScanStateLaunch => -t&&-g&&-i&&-maxR
      | SampleFirstScanSinkHbaseLaunch => -pn&&-pt&&-t&&-g&&-i&&-maxR
      | SampleFirstScanSinkHiveLaunch => -hd&&-ht&&-t&&-g&&-i&&-maxR
      |outer:
      | 1.maxRatePerPartition throughput default 1000.
      | 2.phoenix table name && namespace Capital
      |
      |
      |""".stripMargin
options.addOption("h", "help", false, option_h_desc)
options.addOption("t", "topic", true, "kafka topic")
options.addOption("g", "group.id", true, "kafka consumer group id")
options.addOption("maxR", "maxRatePerPartition", true, "kafka topic partition max Rate")
options.addOption("i", "interval", true, "SparkStreaming direct Kafka Batch Interval [s]")
//hive || phoenix
// options.addOption("hd", "hive.database", true, "FirstScanSinkHive hive database")
// options.addOption("ht", "hive.table", true, "FirstScanSinkHive hive table")
options.addOption("pn", "phoenix.namespace", true, "FirstScanSinkPhoenix phoenix namespace")
options.addOption("pt", "phoenix.table", true, "FirstSinkPhoenix phoenix table")
options.addOption("hp", "hp", true, "写入hive还是hbase h=hive p=phoenix hp=hive和phoenix")
options.addOption("local", "local", true, "是否启用loncal(*)")
private val parser: PosixParser = new PosixParser
def parseParam(args: Array[String], applicationName: String): ExecutorParam = {
  if (args == null) {
    logger.error("HR => param is NULL")
    System.exit(ERROR_EXIT_STATUS)
  }
  var line: CommandLine = null
  try {
    line = parser.parse(options, args)
  } catch {
    case e: Exception =>
      logger.error("HR => args parse Exception, please check the params")
      println(e.getStackTrace.mkString("Array(", ", ", ")"))
      System.exit(ERROR_EXIT_STATUS)
  }
  if (line.hasOption("h")) {
    new HelpFormatter().printHelp("desc", options)
    System.exit(ERROR_EXIT_STATUS)
  }
  val topic: String = line.getOptionValue("topic")
  val groupId: String = line.getOptionValue("group.id")
  val maxRatePerPartition: String = line.getOptionValue("maxRatePerPartition", "1000")
  val interval: String = line.getOptionValue("interval")
  val hp: String = line.getOptionValue("hp")
  val local: String = line.getOptionValue("local")
  if (topic == null || groupId == null || interval == null || hp == null || local == null) {
    logger.error(s"HR => topic|groupId|interval|hp|local is missing, please check the params")
    System.exit(ERROR_EXIT_STATUS)
  }
  logger.info(s"HR => paramList[topic : $topic \n groupId : $groupId \n maxRatePerPartition : $maxRatePerPartition \n interval : $interval]")
  val phoenixNameSpace: String = line.getOptionValue("phoenix.namespace")
  val phoenixTable: String = line.getOptionValue("phoenix.table")
  logger.info(s"HR => paramList[ phoenix.namespace : $phoenixNameSpace \n phoenix.table : $phoenixTable]")
  // val hiveDatabase: String = line.getOptionValue("hive.database")
  // val hiveTable: String = line.getOptionValue("hive.table")
  // logger.info(s"HR => paramList[ hive.database : $hiveDatabase \n hive.table : $hiveTable]")
  ExecutorParam(topic, groupId, maxRatePerPartition.toInt, interval.toInt, applicationName + hp, phoenixNameSpace, phoenixTable, hp, local.toBoolean)
}
} // end of the enclosing object
case class ExecutorParam(
  topic: String,
  groupId: String,
  maxRatePerPartition: Int,
  interval: Int,
  applicationName: String,
  phoenixNameSpace: String,
  phoenixTable: String,
  hp: String,
  local: Boolean)
How to pass the parameters - example (an in-code equivalent is sketched after the flag list)
-maxR 500 \
-t hr_task_scan \
-i 3 \
-g SplitTaskTypeUOFFICIAL \
-pn OFFICIAL \
-pt SAMPLE_U_SCAN,SAMPLE_U_SCAN_LATEST,SAMPLE_T_SCAN_LATEST \
-local false
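These flags can also be exercised in code by handing them to parseParam as a plain Array[String]. The snippet below is only a hypothetical smoke test, not part of the original post; it assumes the parsing code lives on an object named Constant (as in the usage example in the next section), and it adds -hp because the validation inside parseParam treats it as required even though the flag list above omits it.

// hypothetical smoke test: the same flags, passed as an Array[String]
val sampleArgs: Array[String] = Array(
  "-maxR", "500",
  "-t", "hr_task_scan",
  "-i", "3",
  "-g", "SplitTaskTypeUOFFICIAL",
  "-pn", "OFFICIAL",
  "-pt", "SAMPLE_U_SCAN,SAMPLE_U_SCAN_LATEST,SAMPLE_T_SCAN_LATEST",
  "-hp", "p", // required by the null check in parseParam
  "-local", "false")
val testParam: ExecutorParam = Constant.parseParam(sampleArgs, "SampleFirstScanStateLaunch")
println(testParam) // case classes provide a readable toString for free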
How to read the parameters - example (a sketch of wiring them into SparkConf follows)
// parse the arguments
val param: ExecutorParam = Constant.parseParam(args, this.getClass.getSimpleName)
// read the parsed values
val applicationName: String = param.applicationName
val maxRatePerPartition: Int = param.maxRatePerPartition
val conf: SparkConf = new SparkConf()
val topic: String = param.topic
val groupId: String = param.groupId
val local: Boolean = param.local
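From here the parsed values typically flow into the Spark setup. The lines below are only a hedged sketch of that wiring and are not from the original post; spark.streaming.kafka.maxRatePerPartition and group.id are standard Spark Streaming / Kafka settings, and the rest of the streaming job is omitted.

// hedged sketch: feed the parsed values into the Spark / Kafka configuration
conf.setAppName(applicationName)
conf.set("spark.streaming.kafka.maxRatePerPartition", maxRatePerPartition.toString)
if (local) conf.setMaster("local[*]")

val kafkaParams: Map[String, Object] = Map(
  "group.id" -> groupId
  // bootstrap servers, deserializers, offset reset policy, etc. would go here
)
// topic and kafkaParams would then be used to create the direct Kafka stream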