当前位置:网站首页>Args parameter parsing
Args parameter parsing
2022-07-23 10:03:00 【The south wind knows what I mean】
List of articles
Pain points
def main(args: Array[String]): Unit
Normally we read the arguments passed to the program via args(0), args(1), and so on. That is fine with only a few parameters, but once there are many it becomes messy — after a while, nobody remembers what each positional argument means.
The goal is a self-documenting command-line interface, similar to this help output:
Usage: hadoop fs [generic options]
[-appendToFile <localsrc> ... <dst>]
[-cat [-ignoreCrc] <src> ...]
[-checksum <src> ...]
[-chgrp [-R] GROUP PATH...]
[-chmod [-R] <MODE[,MODE]... | OCTALMODE> PATH...]
[-chown [-R] [OWNER][:[GROUP]] PATH...]
[-copyFromLocal [-f] [-p] [-l] [-d] [-t <thread count>] <localsrc> ... <dst>]
[-copyToLocal [-f] [-p] [-ignoreCrc] [-crc] <src> ... <localdst>]
[-count [-q] [-h] [-v] [-t [<storage type>]] [-u] [-x] [-e] <path> ...]
[-cp [-f] [-p | -p[topax]] [-d] <src> ... <dst>]
[-createSnapshot <snapshotDir> [<snapshotName>]]
[-deleteSnapshot <snapshotDir> <snapshotName>]
[-df [-h] [<path> ...]]
[-du [-s] [-h] [-v] [-x] <path> ...]
[-expunge]
Implementation — Scala (using Apache Commons CLI)
// Apache Commons CLI option registry shared by all launchers in this object.
private val options: Options = new Options
// Help text printed for -h/--help.
// NOTE(review): this literal was collapsed onto a single line in the published
// article, which breaks stripMargin (the '|' markers only work at line starts).
// Reconstructed as the intended multi-line form — line breaks around the
// "Spark Streaming | RedisStateStorePerDay" segment are a best guess; confirm
// against the original source.
private val option_h_desc: String =
  """
    |print help info
    |usage:
    | java -cp jarLocation fullClassName [-h] || [-t]&&[-g]&&[-maxR] 1> applicationName.log 2>&1 &
    |
    |data flow:
    | kafka => Spark Streaming
    | RedisStateStorePerDay
    |
    |programs:
    | SampleFirstScanStateLaunch => -t&&-g&&-i&&-maxR
    | SampleFirstScanSinkHbaseLaunch => -pn&&-pt&&-t&&-g&&-i&&-maxR
    | SampleFirstScanSinkHiveLaunch => -hd&&-ht&&-t&&-g&&-i&&-maxR
    |outer:
    | 1.maxRatePerPartition throughput default 1000.
    | 2.phoenix table name && namespace Capital
    |
    |
    |""".stripMargin
// addOption(shortName, longName, hasArg, description)
options.addOption("h", "help", false, option_h_desc)
options.addOption("t", "topic", true, "kafka topic")
options.addOption("g", "group.id", true, "kafka consumer group id")
options.addOption("maxR", "maxRatePerPartition", true, "kafka topic partition max Rate")
options.addOption("i", "interval", true, "SparkStreaming direct Kafka Batch Interval [s]")
// hive || phoenix — the -hd/-ht options are mentioned in the help text but are
// currently disabled; re-enable them here if the Hive sink launcher is used.
// options.addOption("hd", "hive.database", true, "FirstScanSinkHive hive database")
// options.addOption("ht", "hive.table", true, "FirstScanSinkHive hive table")
options.addOption("pn", "phoenix.namespace", true, "FirstScanSinkPhoenix phoenix namespace")
options.addOption("pt", "phoenix.table", true, "FirstSinkPhoenix phoenix table")
// FIX: corrected garbled help text ("write in hive still hbase", "loncal").
options.addOption("hp", "hp", true, "write to hive or hbase: h=hive p=phoenix hp=hive and phoenix")
options.addOption("local", "local", true, "whether to enable Spark local(*) mode")
// NOTE(review): PosixParser is deprecated in commons-cli >= 1.3; consider
// DefaultParser if the project's commons-cli version allows it.
private val parser: PosixParser = new PosixParser
/**
 * Parses the program's command-line arguments into an [[ExecutorParam]].
 *
 * Required options: -t (topic), -g (group.id), -i (interval), -hp, -local.
 * Optional: -maxR (defaults to 1000), -pn, -pt.
 * On any missing/invalid input the process is terminated via
 * `System.exit(ERROR_EXIT_STATUS)` after logging the reason; `-h` prints the
 * help text and also exits.
 *
 * @param args            raw arguments from `main`
 * @param applicationName caller's class name; the `hp` value is appended to it
 *                        to form the Spark application name
 * @return the fully populated [[ExecutorParam]]
 */
def parseParam(args: Array[String], applicationName: String): ExecutorParam = {
  if (args == null) {
    logger.error("HR => param is NULL")
    System.exit(ERROR_EXIT_STATUS)
  }
  var line: CommandLine = null
  try {
    line = parser.parse(options, args)
  } catch {
    case e: Exception =>
      // FIX: log the stack trace through the logger instead of println-ing
      // e.getStackTrace (which loses the message and bypasses log routing).
      logger.error("HR => args parse Exception. parse check param", e)
      System.exit(ERROR_EXIT_STATUS)
  }
  if (line.hasOption("h")) {
    new HelpFormatter().printHelp("desc", options)
    System.exit(ERROR_EXIT_STATUS)
  }
  val topic: String = line.getOptionValue("topic")
  val groupId: String = line.getOptionValue("group.id")
  // Throughput cap per Kafka partition; defaults to 1000 when -maxR is absent.
  val maxRatePerPartition: String = line.getOptionValue("maxRatePerPartition", "1000")
  val interval: String = line.getOptionValue("interval")
  val hp: String = line.getOptionValue("hp")
  val local: String = line.getOptionValue("local")
  if (topic == null || groupId == null || interval == null || hp == null || local == null) {
    // FIX: the old message named only topic|groupId|interval although hp and
    // local are also mandatory here.
    logger.error(s"HR => topic|groupId|interval|hp|local missing. parse check param")
    System.exit(ERROR_EXIT_STATUS)
  }
  logger.info(s"HR => paramList[topic : $topic \n groupId : $groupId \n maxRatePerPartition : $maxRatePerPartition \n interval : $interval]")
  // Phoenix coordinates are optional; they may be null for non-Phoenix launchers.
  val phoenixNameSpace: String = line.getOptionValue("phoenix.namespace")
  val phoenixTable: String = line.getOptionValue("phoenix.table")
  logger.info(s"HR => paramList[ phoenix.namespace : $phoenixNameSpace \n phoenix.table : $phoenixTable]")
  // val hiveDatabase: String = line.getOptionValue("hive.database")
  // val hiveTable: String = line.getOptionValue("hive.table")
  // logger.info(s"HR => paramList[ hive.database : $hiveDatabase \n hive.table : $hiveTable]")
  try {
    ExecutorParam(topic, groupId, maxRatePerPartition.toInt, interval.toInt, applicationName + hp, phoenixNameSpace, phoenixTable, hp, local.toBoolean)
  } catch {
    // FIX: fail fast with a clear message instead of an uncaught
    // NumberFormatException / IllegalArgumentException from toInt / toBoolean.
    case e: IllegalArgumentException =>
      logger.error("HR => maxRatePerPartition/interval must be Int and local must be true|false. parse check param", e)
      System.exit(ERROR_EXIT_STATUS)
      throw e // unreachable after exit; satisfies the return type
  }
}
}
/**
 * Immutable bundle of all parsed command-line parameters.
 *
 * @param topic               Kafka topic to consume
 * @param groupId             Kafka consumer group id
 * @param maxRatePerPartition max records per Kafka partition per batch
 * @param interval            Spark Streaming batch interval in seconds
 * @param applicationName     Spark application name (caller class name + hp)
 * @param phoenixNameSpace    Phoenix namespace (may be null when unused)
 * @param phoenixTable        Phoenix table name(s) (may be null when unused)
 * @param hp                  sink selector: h=hive, p=phoenix, hp=both
 * @param local               whether to run Spark in local(*) mode
 */
final case class ExecutorParam(
    topic: String,
    groupId: String,
    maxRatePerPartition: Int,
    interval: Int,
    applicationName: String,
    phoenixNameSpace: String,
    phoenixTable: String,
    hp: String,
    local: Boolean)
How to pass the arguments — Example
-maxR 500 \
-t hr_task_scan \
-i 3 \
-g SplitTaskTypeUOFFICIAL \
-pn OFFICIAL \
-pt SAMPLE_U_SCAN,SAMPLE_U_SCAN_LATEST,SAMPLE_T_SCAN_LATEST \
-local false
How to read the parsed parameters — Example
// Parse the raw command-line arguments into an ExecutorParam
val param: ExecutorParam = Constant.parseParam(args, this.getClass.getSimpleName)
// Read the individual parameters from the parsed result
val applicationName: String = param.applicationName
val maxRatePerPartition: Int = param.maxRatePerPartition
val conf: SparkConf = new SparkConf()
val topic: String = param.topic
val groupId: String = param.groupId
val local: Boolean = param.local
边栏推荐
猜你喜欢

Is the sub database and sub table really suitable for your system? Talk about how to select sub databases, sub tables and newsql

1.赋值语句

数据库范式与模式分解

百度地图鹰眼轨迹服务

想放弃软件测试了,4年经验去面试10分钟结束,测试现在这么难了?

three文档使用

【循环语句】

本地提权的学习

Tsinghua, air, Tencent | 3D isovariant molecular map pre training

分库分表真的适合你的系统吗?聊聊分库分表和NewSQL如何选择
随机推荐
Is it safe for CITIC futures to open an account online and will it be cheated?
清华、AIR、腾讯 | 3D等变分子图预训练
【PyTorch】cuda()与to(device)的区别
《nlp入门+实战:第一章:深度学习和神经网络》
软件质量管理实践全面总结
js div 滚动到底部
Fireshare for short video sharing
phpcms实现产品多条件筛选功能
Several important problems of port completion
Interviewer: explain the core principle of ThreadLocal
insert引起的db file sequential read之改善
args参数解析
数据库范式与模式分解
Realize multi-level conditional query (similar to JD multi-level add query)
解密 Redis 助力双 11 背后电商秒杀系统
Baidu map eagle eye track service
60道测开面试题,背完直接涨工资
Kill a process on Linux
实现方法pathListToMap,能够将输入的pathList转化为具有层级结构的map类型
60 open-ended test questions, recite them and get a pay rise directly