--------------日志使用
1.maven依赖
<dependency>
<groupId>org.slf4j</groupId>
<artifactId>slf4j-log4j12</artifactId>
<version>1.7.25</version>
</dependency>
<dependency>
<groupId>org.slf4j</groupId>
<artifactId>slf4j-api</artifactId>
<version>1.7.25</version>
</dependency>
2.代码案例
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
public class TestClassName {
    // Value interpolated into the log message below.
    private static final String name = "itXiaoBai";
    // One SLF4J logger per class, bound to this class for correct log attribution.
    private static final Logger logger = LoggerFactory.getLogger(TestClassName.class);

    public static void main(String[] args) {
        // Parameterized logging ({} placeholder) avoids eager string concatenation.
        logger.info("this is: {}", name);
    }
}
2018-12-03 21:53:17,461 INFO [test.TestClassName] - this is: itXiaoBai
Process finished with exit code 0
--------------@Data--pojo的set/get方法（注意：Lombok 注解是 @Data，不是 @Date）
1.maven依赖
<dependency>
<groupId>org.projectlombok</groupId>
<artifactId>lombok</artifactId>
<version>1.16.20</version>
</dependency>
2.代码案例：在 POJO 类上添加 Lombok 的 @Data 注解，即可在编译期自动生成 get/set、toString、equals/hashCode 方法
---------------HTTP获取URL中的内容&&IOUtils直接将流toString成字符串
1.maven依赖
<dependency>
<groupId>org.apache.httpcomponents</groupId>
<artifactId>httpclient</artifactId>
<version>4.5.4</version>
</dependency>
<dependency>
<groupId>commons-io</groupId>
<artifactId>commons-io</artifactId>
<version>2.5</version>
</dependency>
2.代码案例
import org.apache.commons.io.IOUtils;
import org.apache.http.client.methods.CloseableHttpResponse;
import org.apache.http.client.methods.HttpGet;
import org.apache.http.impl.client.CloseableHttpClient;
import org.apache.http.impl.client.HttpClients;
import org.apache.http.impl.client.SystemDefaultHttpClient;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.io.IOException;
import java.io.InputStream;
import java.nio.charset.StandardCharsets;
/**
 * Fetches the content of a URL over HTTP and logs the response body.
 */
public class GetDateByURL {
    private static final Logger logger = LoggerFactory.getLogger(GetDateByURL.class);

    public static void main(String[] args) throws IOException {
        String url = "https://blog.csdn.net/qq_38617531";
        HttpGet httpGet = new HttpGet(url);
        // try-with-resources closes client, response and entity stream.
        // The original closed nothing and used the deprecated SystemDefaultHttpClient.
        try (CloseableHttpClient client = HttpClients.createDefault();
             CloseableHttpResponse response = client.execute(httpGet);
             InputStream content = response.getEntity().getContent()) {
            // Explicit charset: the no-charset IOUtils.toString overload is
            // deprecated and depends on the platform default encoding.
            String string = IOUtils.toString(content, StandardCharsets.UTF_8);
            logger.info("content:{}", string);
        }
    }
}
---------解析json字符串
1.maven依赖
<dependency>
<groupId>com.google.code.gson</groupId>
<artifactId>gson</artifactId>
<version>2.8.2</version>
</dependency>
2.代码案例
import com.google.gson.JsonObject;
import com.google.gson.JsonParser;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* 解析json字符串
*/
public class AnalyzeJson {
private static final Logger logger=LoggerFactory.getLogger(AnalyzeJson.class);
public static void main(String[] args) {
String jsonStr="{
" +
" "beans" : [ {
" +
" "name" : "java.lang:type=OperatingSystem",
" +
" "modelerType" : "sun.management.OperatingSystemImpl",
" +
" "OpenFileDescriptorCount" : 289,
" +
" "MaxFileDescriptorCount" : 4096,
" +
" "CommittedVirtualMemorySize" : 2829778944,
" +
" "TotalSwapSpaceSize" : 1073737728,
" +
" "FreeSwapSpaceSize" : 408305664,
" +
" "ProcessCpuTime" : 85530000000,
" +
" "FreePhysicalMemorySize" : 89407488,
" +
" "TotalPhysicalMemorySize" : 1028235264,
" +
" "SystemCpuLoad" : 1.0,
" +
" "ProcessCpuLoad" : 0.0,
" +
" "AvailableProcessors" : 1,
" +
" "Version" : "2.6.32-642.el6.x86_64",
" +
" "Arch" : "amd64",
" +
" "SystemLoadAverage" : 1.36,
" +
" "Name" : "Linux",
" +
" "ObjectName" : "java.lang:type=OperatingSystem"
" +
" } ]
" +
"}";
JsonObject jsonObject = new JsonParser().parse(jsonStr).getAsJsonObject();
double systemCpuLoad = jsonObject.getAsJsonArray("beans")
.get(0)
.getAsJsonObject()
.get("SystemCpuLoad")
.getAsDouble();
logger.info("systemCpuLoad:{}",systemCpuLoad);
}
}
-----------------------加载xml配置文件
1.maven依赖
<dependency>
<groupId>dom4j</groupId>
<artifactId>dom4j</artifactId>
<version>1.6.1</version>
</dependency>
2.部分代码案例
test.xml
<?xml version="1.0" encoding="utf-8"?>
<tasks>
<task>
<name>cpuload</name>
<type>hdfs</type>
<period>3</period>
<class>task.hdfs.CpuLoadTask</class>
</task>
</tasks>
// Load the XML configuration file from the classpath.
SAXReader reader = new SAXReader();
InputStream inputStream = App.class.getClassLoader().getResourceAsStream("test.xml");
// Parse the stream into a dom4j Document. NOTE(review): the original comment
// said "task.xml", but the resource actually loaded above is "test.xml".
Document document = reader.read(inputStream);
// <tasks> root element of the sample file shown above.
Element root = document.getRootElement();
List<Element> childElements = root.elements();
// One <task> element per scheduled job.
for (Element child : childElements) {
String period = child.element("period").getTextTrim();
String className = child.element("class").getTextTrim();
String type = child.element("type").getTextTrim();
String name = child.element("name").getTextTrim();
// Instantiate the configured class by reflection; newInstance() requires a
// public no-arg constructor. NOTE(review): MonitorTask and Configuration are
// project types not shown in this snippet.
MonitorTask instance = (MonitorTask) Class.forName(className).newInstance();
// Copy the XML values into a Configuration and initialize the task.
Configuration conf = new SystemConfiguration();
conf.setProperty("period", Long.valueOf(period));
conf.setProperty("type", type);
conf.setProperty("name", name);
instance.init(conf);
// NOTE(review): partial example — the for-loop's closing brace and the
// checked-exception handling (read/forName/newInstance) are outside this snippet.
-----------定时调度线程池
1.代码案例
import java.util.concurrent.Executors;
import java.util.concurrent.ScheduledExecutorService;
import java.util.concurrent.TimeUnit;
public class ScheduledExecutorTest {
    // Scheduler backing all timers created by this instance (15 threads).
    private ScheduledExecutorService scheduExec;
    // Creation timestamp in milliseconds, available for elapsed-time checks.
    public long start;

    ScheduledExecutorTest() {
        this.scheduExec = Executors.newScheduledThreadPool(15);
        this.start = System.currentTimeMillis();
    }

    /**
     * Schedules a repeating task: first run after 2 seconds, then every
     * 5 seconds (both expressed in milliseconds via TimeUnit.MILLISECONDS).
     */
    public void timer() {
        Runnable task = new Runnable() {
            @Override
            public void run() {
                System.out.println("timer invoked .....");
            }
        };
        // (initialDelay, period, unit)
        scheduExec.scheduleAtFixedRate(task, 2000, 5000, TimeUnit.MILLISECONDS);
    }

    public static void main(String[] args) {
        ScheduledExecutorTest test = new ScheduledExecutorTest();
        test.timer();
    }
}
------------通过加载配置文件,加载配置项
1.配置文件---kafka.properties
bootstrap.servers=192.168.10.3:9092
acks=all
retries=3
batch.size=5000000
linger.ms=6
buffer.memory=33554432
compression.type=gzip
key.serializer=org.apache.kafka.common.serialization.StringSerializer
value.serializer=org.apache.kafka.common.serialization.StringSerializer
auto.create.topics.enable=true
2.代码案例
import org.apache.kafka.clients.producer.KafkaProducer;
import org.apache.kafka.clients.producer.Producer;
import org.apache.kafka.clients.producer.ProducerRecord;
import java.io.IOException;
import java.util.Properties;
/**
 * Minimal Kafka producer utility. The producer is configured once, at class
 * load time, from kafka.properties on the classpath.
 */
public class KafkaUtil {
    private static Producer<String, String> producer = null;

    static {
        Properties properties = new Properties();
        // try-with-resources: the original never closed the classpath stream.
        try (java.io.InputStream in =
                     KafkaUtil.class.getClassLoader().getResourceAsStream("kafka.properties")) {
            if (in == null) {
                // Explicit message; the original would have died with an
                // obscure NPE inside Properties.load instead.
                throw new IOException("kafka.properties not found on classpath");
            }
            properties.load(in);
            producer = new KafkaProducer<String, String>(properties);
        } catch (IOException e) {
            // Fail fast. The original swallowed the exception (printStackTrace),
            // leaving producer == null and deferring the failure to an NPE in send().
            throw new ExceptionInInitializerError(e);
        }
    }

    /**
     * Sends content to the given topic with an empty-string key.
     */
    public static void send(String content, String topic) {
        producer.send(new ProducerRecord<String, String>(topic, "", content));
    }
}
-----------------