import com.alibaba.fastjson.JSON;
import com..common.ProjectConfig;

import java.io.File;
import java.io.FileInputStream;
import java.io.IOException;
import java.io.InputStream;
import java.util.HashMap;
import java.util.Map;

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.yaml.snakeyaml.Yaml;

/**
 * Utility for reading YAML configuration files, resolving relative paths
 * against the project conf directory.
 */
public class ConfigUtil {

    private static final Logger log = LoggerFactory.getLogger(ConfigUtil.class);

    private ConfigUtil() {
    }

    /**
     * Reads a YAML file into a Map. If the path does not exist as given, it is
     * resolved against the project conf directory.
     */
    public static Map readYaml(String file) {
        if (!new File(file).exists()) {
            file = ProjectConfig.getProjectConfDir() + File.separator + file;
        }
        try (InputStream in = new FileInputStream(file)) {
            return new Yaml().loadAs(in, HashMap.class);
        } catch (IOException e) {
            log.error("Failed to read file", e);
            return null;
        }
    }

    /**
     * Reads the section under {@code prefix} and binds it to the given type
     * via a JSON round-trip.
     */
    public static <T> T readYamlByPrefix(String file, String prefix, Class<T> clazz) {
        return JSON.parseObject(JSON.toJSONString(readYaml(file).get(prefix)), clazz);
    }

    /**
     * Reads the section under {@code prefix} as a Map.
     */
    public static Map<String, Object> readYamlByPrefix(String file, String prefix) {
        return (Map<String, Object>) readYaml(file).get(prefix);
    }

    public static void main(String[] args) {
        System.out.println();
    }
}
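/*
 * A minimal usage sketch for ConfigUtil (not part of the original sources). It assumes
 * PROJECT_HOME points at a directory whose conf/ folder contains an application.yaml
 * with an "es" section; the file name and section key here are illustrative only.
 */
class ConfigUtilUsageExample {

    public static void main(String[] args) {
        // Whole file as a raw Map; a relative name is resolved under $PROJECT_HOME/conf.
        java.util.Map all = ConfigUtil.readYaml("application.yaml");

        // One section as a Map<String, Object>.
        java.util.Map<String, Object> es = ConfigUtil.readYamlByPrefix("application.yaml", "es");

        System.out.println(all);
        System.out.println(es);
    }
}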
import com.alibaba.fastjson.JSON;
import com.google.common.base.Preconditions;
import com..common.constant.CommonConstant;
import com..common.entity.EsConfigEntity;
import com..common.entity.KafkaConfigEntity;
import com..common.tool.ConfigUtil;
import com..common.tool.FileUtil;

import java.io.File;
import java.io.FileInputStream;
import java.io.IOException;
import java.io.InputStream;
import java.util.Arrays;
import java.util.List;
import java.util.Map;
import java.util.Optional;
import java.util.Properties;

import org.apache.commons.lang3.StringUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

/**
 * Central access point for project configuration: Spark properties files,
 * YAML configuration (application.yaml, job.yaml, service.yaml) and ES index mappings.
 */
public class ProjectConfig {

    private static final Logger log = LoggerFactory.getLogger(ProjectConfig.class);

    private static final String SPARK_BATCH_CONF_FILE = "spark-batch.properties";
    private static final String SPARK_STREAMING_CONF_FILE = "spark-streaming.properties";
    private static final String JOB_CONF_FILE = "job.yaml";
    private static final String SERVICE_CONF_FILE = "service.yaml";
    private static final String COMMON_CONF_FILE = "application.yaml";

    private static Map<String, String> sparkBatchConf = loadConfig(SPARK_BATCH_CONF_FILE);
    private static Map<String, String> sparkStreamingConf = loadConfig(SPARK_STREAMING_CONF_FILE);

    public static String getProjectHome() {
        return System.getenv(CommonConstant.PROJECT_HOME);
    }

    public static String getProjectConfDir() {
        return getProjectHome() + File.separator + "conf";
    }

    public static String getDpsJarFile() {
        return getProjectHome() + File.separator + "jars" + File.separator + "libra-dps.jar";
    }

    public static String getCommonConfFile() {
        return getProjectConfDir() + File.separator + COMMON_CONF_FILE;
    }

    public static Map<String, String> getSparkBatchConf() {
        return sparkBatchConf;
    }

    public static Map<String, String> getSparkStreamingConf() {
        return sparkStreamingConf;
    }

    public static EsConfigEntity getESConf() {
        return JSON.parseObject(JSON.toJSONString(ConfigUtil.readYamlByPrefix(COMMON_CONF_FILE, "es")), EsConfigEntity.class);
    }

    public static KafkaConfigEntity getKafkaConf() {
        return JSON.parseObject(JSON.toJSONString(ConfigUtil.readYamlByPrefix(COMMON_CONF_FILE, "kafka")), KafkaConfigEntity.class);
    }

    public static String getIndexLimit() {
        return ConfigUtil.readYamlByPrefix(COMMON_CONF_FILE, "es").get("splitLimitNum").toString();
    }

    public static Object getCommonValue(String key) {
        return ConfigUtil.readYaml(COMMON_CONF_FILE).get(key);
    }

    public static Map<String, Object> getRedisConf() {
        return ConfigUtil.readYamlByPrefix(COMMON_CONF_FILE, "redis");
    }

    public static Object getStreamingJobValue(String key) {
        return ConfigUtil.readYaml(JOB_CONF_FILE).get(key);
    }

    public static Map getStreamingJobConf() {
        return ConfigUtil.readYaml(JOB_CONF_FILE);
    }

    public static Map getStreamingServiceConf() {
        return ConfigUtil.readYaml(SERVICE_CONF_FILE);
    }

    public static List<String> getProtocols() {
        return Arrays.asList(ConfigUtil.readYaml(COMMON_CONF_FILE).get("protocol").toString().split(","));
    }

    /**
     * Looks up the ES mapping JSON for the given index name, first under the
     * conf/mapping directory, otherwise as a plain "<indexName>.json" file.
     */
    public static Optional<String> getEsMapping(String indexName) {
        Preconditions.checkArgument(StringUtils.isNotBlank(indexName));
        File mappingDir = new File(getProjectConfDir() + File.separator + "mapping");
        return mappingDir.isDirectory()
                ? Arrays.stream(mappingDir.listFiles())
                        .filter(file -> indexName.equals(file.getName().split("\\.")[0]))
                        .map(file -> FileUtil.fileToString(mappingDir + File.separator + file.getName()))
                        .findAny()
                : Optional.ofNullable(FileUtil.fileToString(indexName + ".json"));
    }

    /**
     * Loads a properties file from the project conf directory, falling back to the
     * working directory if it is not found there.
     */
    public static Map loadConfig(String fileName) {
        Preconditions.checkArgument(StringUtils.isNotBlank(fileName));
        String confPath = getProjectConfDir();
        File file = new File(confPath + File.separator + fileName);
        if (!file.exists()) {
            file = new File(fileName);
        }
        Preconditions.checkArgument(file.exists(), "Config file not found");
        Properties properties = new Properties();
        try (InputStream in = new FileInputStream(file)) {
            properties.load(in);
            return properties;
        } catch (IOException e) {
            log.error("Failed to read config file: {}/{}", confPath, fileName, e);
            throw ExceptionUtil.buildRuntimeException();
        }
    }
}
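/*
 * A minimal usage sketch for ProjectConfig (not part of the original sources). It assumes
 * PROJECT_HOME is set and conf/ contains application.yaml plus the Spark properties files;
 * the index name "demo_index" is hypothetical and entity-class imports are omitted.
 */
class ProjectConfigUsageExample {

    public static void main(String[] args) {
        // Typed views of the "es" and "kafka" sections of application.yaml.
        EsConfigEntity esConf = ProjectConfig.getESConf();
        KafkaConfigEntity kafkaConf = ProjectConfig.getKafkaConf();

        // Spark batch properties, loaded once when ProjectConfig is initialized.
        java.util.Map<String, String> batchConf = ProjectConfig.getSparkBatchConf();

        // ES mapping lookup for a hypothetical index, resolved from conf/mapping when present.
        java.util.Optional<String> mapping = ProjectConfig.getEsMapping("demo_index");
        mapping.ifPresent(System.out::println);

        System.out.println(esConf);
        System.out.println(kafkaConf);
        System.out.println(batchConf.size());
    }
}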