@ -0,0 +1 @@
|
||||
pyg
|
@ -0,0 +1,2 @@
|
||||
<?xml version="1.0" encoding="UTF-8"?>
|
||||
<module type="JAVA_MODULE" version="4" />
|
@ -0,0 +1,5 @@
|
||||
<component name="ProjectCodeStyleConfiguration">
|
||||
<state>
|
||||
<option name="PREFERRED_PROJECT_CODE_STYLE" value="Default" />
|
||||
</state>
|
||||
</component>
|
@ -0,0 +1,17 @@
|
||||
<?xml version="1.0" encoding="UTF-8"?>
|
||||
<project version="4">
|
||||
<component name="CompilerConfiguration">
|
||||
<annotationProcessing>
|
||||
<profile name="Maven default annotation processors profile" enabled="true">
|
||||
<sourceOutputDir name="target/generated-sources/annotations" />
|
||||
<sourceTestOutputDir name="target/generated-test-sources/test-annotations" />
|
||||
<outputRelativeToContentRoot value="true" />
|
||||
<module name="batch-process" />
|
||||
<module name="canal-kafka" />
|
||||
<module name="real-process" />
|
||||
<module name="report" />
|
||||
<module name="sync-db" />
|
||||
</profile>
|
||||
</annotationProcessing>
|
||||
</component>
|
||||
</project>
|
@ -0,0 +1,6 @@
|
||||
<?xml version="1.0" encoding="UTF-8"?>
|
||||
<project version="4">
|
||||
<component name="Encoding">
|
||||
<file url="file://$PROJECT_DIR$/report" charset="UTF-8" />
|
||||
</component>
|
||||
</project>
|
@ -0,0 +1,9 @@
|
||||
<?xml version="1.0" encoding="UTF-8"?>
|
||||
<project version="4">
|
||||
<component name="HydraSettings">
|
||||
<option name="hydraStorePath" value="E:\IdeaProject\Flink-pyg\.hydra\idea" />
|
||||
<option name="noOfCores" value="4" />
|
||||
<option name="projectRoot" value="E:\IdeaProject\Flink-pyg" />
|
||||
<option name="sourcePartitioner" value="auto" />
|
||||
</component>
|
||||
</project>
|
@ -0,0 +1,17 @@
|
||||
<?xml version="1.0" encoding="UTF-8"?>
|
||||
<project version="4">
|
||||
<component name="ExternalStorageConfigurationManager" enabled="true" />
|
||||
<component name="JavaScriptSettings">
|
||||
<option name="languageLevel" value="ES6" />
|
||||
</component>
|
||||
<component name="MavenProjectsManager">
|
||||
<option name="originalFiles">
|
||||
<list>
|
||||
<option value="$PROJECT_DIR$/pom.xml" />
|
||||
</list>
|
||||
</option>
|
||||
</component>
|
||||
<component name="ProjectRootManager" version="2" languageLevel="JDK_1_8" project-jdk-name="1.8" project-jdk-type="JavaSDK">
|
||||
<output url="file://$PROJECT_DIR$/out" />
|
||||
</component>
|
||||
</project>
|
@ -0,0 +1,124 @@
|
||||
<?xml version="1.0" encoding="UTF-8"?>
|
||||
<project version="4">
|
||||
<component name="Palette2">
|
||||
<group name="Swing">
|
||||
<item class="com.intellij.uiDesigner.HSpacer" tooltip-text="Horizontal Spacer" icon="/com/intellij/uiDesigner/icons/hspacer.png" removable="false" auto-create-binding="false" can-attach-label="false">
|
||||
<default-constraints vsize-policy="1" hsize-policy="6" anchor="0" fill="1" />
|
||||
</item>
|
||||
<item class="com.intellij.uiDesigner.VSpacer" tooltip-text="Vertical Spacer" icon="/com/intellij/uiDesigner/icons/vspacer.png" removable="false" auto-create-binding="false" can-attach-label="false">
|
||||
<default-constraints vsize-policy="6" hsize-policy="1" anchor="0" fill="2" />
|
||||
</item>
|
||||
<item class="javax.swing.JPanel" icon="/com/intellij/uiDesigner/icons/panel.png" removable="false" auto-create-binding="false" can-attach-label="false">
|
||||
<default-constraints vsize-policy="3" hsize-policy="3" anchor="0" fill="3" />
|
||||
</item>
|
||||
<item class="javax.swing.JScrollPane" icon="/com/intellij/uiDesigner/icons/scrollPane.png" removable="false" auto-create-binding="false" can-attach-label="true">
|
||||
<default-constraints vsize-policy="7" hsize-policy="7" anchor="0" fill="3" />
|
||||
</item>
|
||||
<item class="javax.swing.JButton" icon="/com/intellij/uiDesigner/icons/button.png" removable="false" auto-create-binding="true" can-attach-label="false">
|
||||
<default-constraints vsize-policy="0" hsize-policy="3" anchor="0" fill="1" />
|
||||
<initial-values>
|
||||
<property name="text" value="Button" />
|
||||
</initial-values>
|
||||
</item>
|
||||
<item class="javax.swing.JRadioButton" icon="/com/intellij/uiDesigner/icons/radioButton.png" removable="false" auto-create-binding="true" can-attach-label="false">
|
||||
<default-constraints vsize-policy="0" hsize-policy="3" anchor="8" fill="0" />
|
||||
<initial-values>
|
||||
<property name="text" value="RadioButton" />
|
||||
</initial-values>
|
||||
</item>
|
||||
<item class="javax.swing.JCheckBox" icon="/com/intellij/uiDesigner/icons/checkBox.png" removable="false" auto-create-binding="true" can-attach-label="false">
|
||||
<default-constraints vsize-policy="0" hsize-policy="3" anchor="8" fill="0" />
|
||||
<initial-values>
|
||||
<property name="text" value="CheckBox" />
|
||||
</initial-values>
|
||||
</item>
|
||||
<item class="javax.swing.JLabel" icon="/com/intellij/uiDesigner/icons/label.png" removable="false" auto-create-binding="false" can-attach-label="false">
|
||||
<default-constraints vsize-policy="0" hsize-policy="0" anchor="8" fill="0" />
|
||||
<initial-values>
|
||||
<property name="text" value="Label" />
|
||||
</initial-values>
|
||||
</item>
|
||||
<item class="javax.swing.JTextField" icon="/com/intellij/uiDesigner/icons/textField.png" removable="false" auto-create-binding="true" can-attach-label="true">
|
||||
<default-constraints vsize-policy="0" hsize-policy="6" anchor="8" fill="1">
|
||||
<preferred-size width="150" height="-1" />
|
||||
</default-constraints>
|
||||
</item>
|
||||
<item class="javax.swing.JPasswordField" icon="/com/intellij/uiDesigner/icons/passwordField.png" removable="false" auto-create-binding="true" can-attach-label="true">
|
||||
<default-constraints vsize-policy="0" hsize-policy="6" anchor="8" fill="1">
|
||||
<preferred-size width="150" height="-1" />
|
||||
</default-constraints>
|
||||
</item>
|
||||
<item class="javax.swing.JFormattedTextField" icon="/com/intellij/uiDesigner/icons/formattedTextField.png" removable="false" auto-create-binding="true" can-attach-label="true">
|
||||
<default-constraints vsize-policy="0" hsize-policy="6" anchor="8" fill="1">
|
||||
<preferred-size width="150" height="-1" />
|
||||
</default-constraints>
|
||||
</item>
|
||||
<item class="javax.swing.JTextArea" icon="/com/intellij/uiDesigner/icons/textArea.png" removable="false" auto-create-binding="true" can-attach-label="true">
|
||||
<default-constraints vsize-policy="6" hsize-policy="6" anchor="0" fill="3">
|
||||
<preferred-size width="150" height="50" />
|
||||
</default-constraints>
|
||||
</item>
|
||||
<item class="javax.swing.JTextPane" icon="/com/intellij/uiDesigner/icons/textPane.png" removable="false" auto-create-binding="true" can-attach-label="true">
|
||||
<default-constraints vsize-policy="6" hsize-policy="6" anchor="0" fill="3">
|
||||
<preferred-size width="150" height="50" />
|
||||
</default-constraints>
|
||||
</item>
|
||||
<item class="javax.swing.JEditorPane" icon="/com/intellij/uiDesigner/icons/editorPane.png" removable="false" auto-create-binding="true" can-attach-label="true">
|
||||
<default-constraints vsize-policy="6" hsize-policy="6" anchor="0" fill="3">
|
||||
<preferred-size width="150" height="50" />
|
||||
</default-constraints>
|
||||
</item>
|
||||
<item class="javax.swing.JComboBox" icon="/com/intellij/uiDesigner/icons/comboBox.png" removable="false" auto-create-binding="true" can-attach-label="true">
|
||||
<default-constraints vsize-policy="0" hsize-policy="2" anchor="8" fill="1" />
|
||||
</item>
|
||||
<item class="javax.swing.JTable" icon="/com/intellij/uiDesigner/icons/table.png" removable="false" auto-create-binding="true" can-attach-label="false">
|
||||
<default-constraints vsize-policy="6" hsize-policy="6" anchor="0" fill="3">
|
||||
<preferred-size width="150" height="50" />
|
||||
</default-constraints>
|
||||
</item>
|
||||
<item class="javax.swing.JList" icon="/com/intellij/uiDesigner/icons/list.png" removable="false" auto-create-binding="true" can-attach-label="false">
|
||||
<default-constraints vsize-policy="6" hsize-policy="2" anchor="0" fill="3">
|
||||
<preferred-size width="150" height="50" />
|
||||
</default-constraints>
|
||||
</item>
|
||||
<item class="javax.swing.JTree" icon="/com/intellij/uiDesigner/icons/tree.png" removable="false" auto-create-binding="true" can-attach-label="false">
|
||||
<default-constraints vsize-policy="6" hsize-policy="6" anchor="0" fill="3">
|
||||
<preferred-size width="150" height="50" />
|
||||
</default-constraints>
|
||||
</item>
|
||||
<item class="javax.swing.JTabbedPane" icon="/com/intellij/uiDesigner/icons/tabbedPane.png" removable="false" auto-create-binding="true" can-attach-label="false">
|
||||
<default-constraints vsize-policy="3" hsize-policy="3" anchor="0" fill="3">
|
||||
<preferred-size width="200" height="200" />
|
||||
</default-constraints>
|
||||
</item>
|
||||
<item class="javax.swing.JSplitPane" icon="/com/intellij/uiDesigner/icons/splitPane.png" removable="false" auto-create-binding="false" can-attach-label="false">
|
||||
<default-constraints vsize-policy="3" hsize-policy="3" anchor="0" fill="3">
|
||||
<preferred-size width="200" height="200" />
|
||||
</default-constraints>
|
||||
</item>
|
||||
<item class="javax.swing.JSpinner" icon="/com/intellij/uiDesigner/icons/spinner.png" removable="false" auto-create-binding="true" can-attach-label="true">
|
||||
<default-constraints vsize-policy="0" hsize-policy="6" anchor="8" fill="1" />
|
||||
</item>
|
||||
<item class="javax.swing.JSlider" icon="/com/intellij/uiDesigner/icons/slider.png" removable="false" auto-create-binding="true" can-attach-label="false">
|
||||
<default-constraints vsize-policy="0" hsize-policy="6" anchor="8" fill="1" />
|
||||
</item>
|
||||
<item class="javax.swing.JSeparator" icon="/com/intellij/uiDesigner/icons/separator.png" removable="false" auto-create-binding="false" can-attach-label="false">
|
||||
<default-constraints vsize-policy="6" hsize-policy="6" anchor="0" fill="3" />
|
||||
</item>
|
||||
<item class="javax.swing.JProgressBar" icon="/com/intellij/uiDesigner/icons/progressbar.png" removable="false" auto-create-binding="true" can-attach-label="false">
|
||||
<default-constraints vsize-policy="0" hsize-policy="6" anchor="0" fill="1" />
|
||||
</item>
|
||||
<item class="javax.swing.JToolBar" icon="/com/intellij/uiDesigner/icons/toolbar.png" removable="false" auto-create-binding="false" can-attach-label="false">
|
||||
<default-constraints vsize-policy="0" hsize-policy="6" anchor="0" fill="1">
|
||||
<preferred-size width="-1" height="20" />
|
||||
</default-constraints>
|
||||
</item>
|
||||
<item class="javax.swing.JToolBar$Separator" icon="/com/intellij/uiDesigner/icons/toolbarSeparator.png" removable="false" auto-create-binding="false" can-attach-label="false">
|
||||
<default-constraints vsize-policy="0" hsize-policy="0" anchor="0" fill="1" />
|
||||
</item>
|
||||
<item class="javax.swing.JScrollBar" icon="/com/intellij/uiDesigner/icons/scrollbar.png" removable="false" auto-create-binding="true" can-attach-label="false">
|
||||
<default-constraints vsize-policy="6" hsize-policy="0" anchor="0" fill="2" />
|
||||
</item>
|
||||
</group>
|
||||
</component>
|
||||
</project>
|
@ -0,0 +1,6 @@
|
||||
<?xml version="1.0" encoding="UTF-8"?>
|
||||
<project version="4">
|
||||
<component name="VcsDirectoryMappings">
|
||||
<mapping directory="" vcs="Git" />
|
||||
</component>
|
||||
</project>
|
@ -0,0 +1,15 @@
|
||||
<?xml version="1.0" encoding="UTF-8"?>
|
||||
<project xmlns="http://maven.apache.org/POM/4.0.0"
|
||||
xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
|
||||
xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
|
||||
<parent>
|
||||
<artifactId>pyg</artifactId>
|
||||
<groupId>com.henry</groupId>
|
||||
<version>1.0-SNAPSHOT</version>
|
||||
</parent>
|
||||
<modelVersion>4.0.0</modelVersion>
|
||||
|
||||
<artifactId>batch-process</artifactId>
|
||||
|
||||
|
||||
</project>
|
@ -0,0 +1,35 @@
|
||||
<?xml version="1.0" encoding="UTF-8"?>
|
||||
<project xmlns="http://maven.apache.org/POM/4.0.0"
|
||||
xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
|
||||
xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
|
||||
<parent>
|
||||
<artifactId>pyg</artifactId>
|
||||
<groupId>com.henry</groupId>
|
||||
<version>1.0-SNAPSHOT</version>
|
||||
</parent>
|
||||
<modelVersion>4.0.0</modelVersion>
|
||||
|
||||
<artifactId>canal-kafka</artifactId>
|
||||
|
||||
<dependencies>
|
||||
<dependency>
|
||||
<groupId>com.alibaba.otter</groupId>
|
||||
<artifactId>canal.client</artifactId>
|
||||
<version>1.0.24</version>
|
||||
</dependency>
|
||||
<!-- https://mvnrepository.com/artifact/org.apache.kafka/kafka -->
|
||||
<dependency>
|
||||
<groupId>org.apache.kafka</groupId>
|
||||
<artifactId>kafka_2.11</artifactId>
|
||||
<version>0.10.1.0</version>
|
||||
</dependency>
|
||||
<!-- fastjson: conversion between objects and JSON -->
|
||||
<dependency>
|
||||
<groupId>com.alibaba</groupId>
|
||||
<artifactId>fastjson</artifactId>
|
||||
<version>1.2.83</version>
|
||||
</dependency>
|
||||
</dependencies>
|
||||
|
||||
|
||||
</project>
|
@ -0,0 +1,30 @@
|
||||
package com.henry.canal_kafka.util;
|
||||
|
||||
import java.util.ResourceBundle;
|
||||
|
||||
public class GlobalConfigUtil {
|
||||
// Obtain a resource bundle loader
// The loader automatically reads application.properties from the CLASSPATH
|
||||
private static ResourceBundle resourceBundle = ResourceBundle.getBundle("application");
|
||||
|
||||
// Read each setting with ResourceBundle.getString
|
||||
public static String canalHost = resourceBundle.getString("canal.host");
|
||||
public static String canalPort = resourceBundle.getString("canal.port");
|
||||
public static String canalInstance = resourceBundle.getString("canal.instance");
|
||||
public static String mysqlUsername = resourceBundle.getString("mysql.username");
|
||||
public static String mysqlPassword = resourceBundle.getString("mysql.password");
|
||||
public static String kafkaBootstrapServers = resourceBundle.getString("kafka.bootstrap.servers");
|
||||
public static String kafkaZookeeperConnect = resourceBundle.getString("kafka.zookeeper.connect");
|
||||
public static String kafkaInputTopic = resourceBundle.getString("kafka.input.topic");
|
||||
|
||||
public static void main(String[] args) {
|
||||
System.out.println(canalHost);
|
||||
System.out.println(canalPort);
|
||||
System.out.println(canalInstance);
|
||||
System.out.println(mysqlUsername);
|
||||
System.out.println(mysqlPassword);
|
||||
System.out.println(kafkaBootstrapServers);
|
||||
System.out.println(kafkaZookeeperConnect);
|
||||
System.out.println(kafkaInputTopic);
|
||||
}
|
||||
}
|
@ -0,0 +1,42 @@
|
||||
package com.henry.canal_kafka.util;
|
||||
|
||||
import kafka.javaapi.producer.Producer;
|
||||
import kafka.producer.KeyedMessage;
|
||||
import kafka.producer.ProducerConfig;
|
||||
import kafka.serializer.StringEncoder;
|
||||
|
||||
import java.util.Properties;
|
||||
|
||||
/**
|
||||
* Utility class for producing messages to Kafka
|
||||
*/
|
||||
public class KafkaSender {
|
||||
private String topic;
|
||||
|
||||
public KafkaSender(String topic){
|
||||
super();
|
||||
this.topic = topic;
|
||||
}
|
||||
|
||||
/**
|
||||
* Send a message to the specified Kafka topic
*
* @param topic topic name
* @param key   message key
* @param data  message payload
|
||||
*/
|
||||
public static void sendMessage(String topic , String key , String data){
|
||||
Producer<String, String> producer = createProducer();
|
||||
producer.send(new KeyedMessage<String , String>(topic , key , data));
|
||||
}
|
||||
|
||||
private static Producer<String , String> createProducer(){
|
||||
Properties properties = new Properties();
|
||||
|
||||
properties.put("metadata.broker.list" , GlobalConfigUtil.kafkaBootstrapServers);
|
||||
properties.put("zookeeper.connect" , GlobalConfigUtil.kafkaZookeeperConnect);
|
||||
properties.put("serializer.class" , StringEncoder.class.getName());
|
||||
|
||||
return new Producer<String, String>(new ProducerConfig(properties));
|
||||
}
|
||||
}
|
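For context, here is a minimal usage sketch (not part of the repository): the key and JSON payload are placeholder values, and the topic is resolved through GlobalConfigUtil from application.properties.

// Hypothetical demo class; the key and payload below are made-up illustrations.
package com.henry.canal_kafka.util;

public class KafkaSenderDemo {
    public static void main(String[] args) {
        String topic = GlobalConfigUtil.kafkaInputTopic;          // e.g. "canal" from application.properties
        String key = "row-1";                                     // placeholder key
        String json = "{\"table\":\"demo\",\"type\":\"INSERT\"}"; // placeholder payload
        KafkaSender.sendMessage(topic, key, json);
    }
}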
@ -0,0 +1,14 @@
|
||||
#
|
||||
# Canal configuration
|
||||
#
|
||||
canal.host=master
|
||||
canal.port=11111
|
||||
canal.instance=example
|
||||
mysql.username=root
|
||||
mysql.password=123456
|
||||
#
|
||||
# Kafka configuration
|
||||
#
|
||||
kafka.bootstrap.servers=master:9092,slave1:9092,slave2:9092
|
||||
kafka.zookeeper.connect=master:2181,slave1:2181,slave2:2181
|
||||
kafka.input.topic=canal
|
@ -0,0 +1,4 @@
|
||||
log4j.rootLogger=error,stdout
|
||||
log4j.appender.stdout=org.apache.log4j.ConsoleAppender
|
||||
log4j.appender.stdout.layout=org.apache.log4j.PatternLayout
|
||||
log4j.appender.stdout.layout.ConversionPattern=%5p - %m%n
|
@ -0,0 +1,20 @@
|
||||
<?xml version="1.0" encoding="UTF-8"?>
|
||||
<project xmlns="http://maven.apache.org/POM/4.0.0"
|
||||
xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
|
||||
xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
|
||||
<modelVersion>4.0.0</modelVersion>
|
||||
|
||||
<groupId>com.henry</groupId>
|
||||
<artifactId>pyg</artifactId>
|
||||
<version>1.0-SNAPSHOT</version>
|
||||
<packaging>pom</packaging>
|
||||
|
||||
<modules>
|
||||
<module>report</module> <!-- reporting (data ingest) service -->
<module>real-process</module> <!-- real-time processing -->
<module>canal-kafka</module> <!-- data collection -->
<module>sync-db</module> <!-- database sync processing -->
<module>batch-process</module> <!-- batch processing -->
|
||||
</modules>
|
||||
|
||||
</project>
|
@ -0,0 +1,2 @@
|
||||
<?xml version="1.0" encoding="UTF-8"?>
|
||||
<module type="JAVA_MODULE" version="4" />
|
@ -0,0 +1,296 @@
|
||||
# Licensed to the Apache Software Foundation (ASF) under one
|
||||
# or more contributor license agreements. See the NOTICE file
|
||||
# distributed with this work for additional information
|
||||
# regarding copyright ownership. The ASF licenses this file
|
||||
# to you under the Apache License, Version 2.0 (the
|
||||
# "License"); you may not use this file except in compliance
|
||||
# with the License. You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
# Define some default values that can be overridden by system properties
|
||||
hadoop.root.logger=ERROR,console
|
||||
hadoop.log.dir=.
|
||||
hadoop.log.file=hadoop.log
|
||||
|
||||
# Define the root logger to the system property "hadoop.root.logger".
|
||||
log4j.rootLogger=${hadoop.root.logger}, EventCounter
|
||||
|
||||
# Logging Threshold
|
||||
log4j.threshold=ALL
|
||||
|
||||
# Null Appender
|
||||
log4j.appender.NullAppender=org.apache.log4j.varia.NullAppender
|
||||
|
||||
#
|
||||
# Rolling File Appender - cap space usage at 5gb.
|
||||
#
|
||||
hadoop.log.maxfilesize=256MB
|
||||
hadoop.log.maxbackupindex=20
|
||||
log4j.appender.RFA=org.apache.log4j.RollingFileAppender
|
||||
log4j.appender.RFA.File=${hadoop.log.dir}/${hadoop.log.file}
|
||||
|
||||
log4j.appender.RFA.MaxFileSize=${hadoop.log.maxfilesize}
|
||||
log4j.appender.RFA.MaxBackupIndex=${hadoop.log.maxbackupindex}
|
||||
|
||||
log4j.appender.RFA.layout=org.apache.log4j.PatternLayout
|
||||
|
||||
# Pattern format: Date LogLevel LoggerName LogMessage
|
||||
log4j.appender.RFA.layout.ConversionPattern=%d{ISO8601} %p %c: %m%n
|
||||
# Debugging Pattern format
|
||||
#log4j.appender.RFA.layout.ConversionPattern=%d{ISO8601} %-5p %c{2} (%F:%M(%L)) - %m%n
|
||||
|
||||
|
||||
#
|
||||
# Daily Rolling File Appender
|
||||
#
|
||||
|
||||
log4j.appender.DRFA=org.apache.log4j.DailyRollingFileAppender
|
||||
log4j.appender.DRFA.File=${hadoop.log.dir}/${hadoop.log.file}
|
||||
|
||||
# Rollver at midnight
|
||||
log4j.appender.DRFA.DatePattern=.yyyy-MM-dd
|
||||
|
||||
# 30-day backup
|
||||
#log4j.appender.DRFA.MaxBackupIndex=30
|
||||
log4j.appender.DRFA.layout=org.apache.log4j.PatternLayout
|
||||
|
||||
# Pattern format: Date LogLevel LoggerName LogMessage
|
||||
log4j.appender.DRFA.layout.ConversionPattern=%d{ISO8601} %p %c: %m%n
|
||||
# Debugging Pattern format
|
||||
#log4j.appender.DRFA.layout.ConversionPattern=%d{ISO8601} %-5p %c{2} (%F:%M(%L)) - %m%n
|
||||
|
||||
|
||||
#
|
||||
# console
|
||||
# Add "console" to rootlogger above if you want to use this
|
||||
#
|
||||
|
||||
log4j.appender.console=org.apache.log4j.ConsoleAppender
|
||||
log4j.appender.console.target=System.err
|
||||
log4j.appender.console.layout=org.apache.log4j.PatternLayout
|
||||
log4j.appender.console.layout.ConversionPattern=%d{yy HH:mm:ss} %p %c{2}: %m%n
|
||||
|
||||
#
|
||||
# TaskLog Appender
|
||||
#
|
||||
|
||||
#Default values
|
||||
hadoop.tasklog.taskid=null
|
||||
hadoop.tasklog.iscleanup=false
|
||||
hadoop.tasklog.noKeepSplits=4
|
||||
hadoop.tasklog.totalLogFileSize=100
|
||||
hadoop.tasklog.purgeLogSplits=true
|
||||
hadoop.tasklog.logsRetainHours=12
|
||||
|
||||
log4j.appender.TLA=org.apache.hadoop.mapred.TaskLogAppender
|
||||
log4j.appender.TLA.taskId=${hadoop.tasklog.taskid}
|
||||
log4j.appender.TLA.isCleanup=${hadoop.tasklog.iscleanup}
|
||||
log4j.appender.TLA.totalLogFileSize=${hadoop.tasklog.totalLogFileSize}
|
||||
|
||||
log4j.appender.TLA.layout=org.apache.log4j.PatternLayout
|
||||
log4j.appender.TLA.layout.ConversionPattern=%d{ISO8601} %p %c: %m%n
|
||||
|
||||
#
|
||||
# HDFS block state change log from block manager
|
||||
#
|
||||
# Uncomment the following to suppress normal block state change
|
||||
# messages from BlockManager in NameNode.
|
||||
#log4j.logger.BlockStateChange=WARN
|
||||
|
||||
#
|
||||
#Security appender
|
||||
#
|
||||
hadoop.security.logger=INFO,NullAppender
|
||||
hadoop.security.log.maxfilesize=256MB
|
||||
hadoop.security.log.maxbackupindex=20
|
||||
log4j.category.SecurityLogger=${hadoop.security.logger}
|
||||
hadoop.security.log.file=SecurityAuth-${user.name}.audit
|
||||
log4j.appender.RFAS=org.apache.log4j.RollingFileAppender
|
||||
log4j.appender.RFAS.File=${hadoop.log.dir}/${hadoop.security.log.file}
|
||||
log4j.appender.RFAS.layout=org.apache.log4j.PatternLayout
|
||||
log4j.appender.RFAS.layout.ConversionPattern=%d{ISO8601} %p %c: %m%n
|
||||
log4j.appender.RFAS.MaxFileSize=${hadoop.security.log.maxfilesize}
|
||||
log4j.appender.RFAS.MaxBackupIndex=${hadoop.security.log.maxbackupindex}
|
||||
|
||||
#
|
||||
# Daily Rolling Security appender
|
||||
#
|
||||
log4j.appender.DRFAS=org.apache.log4j.DailyRollingFileAppender
|
||||
log4j.appender.DRFAS.File=${hadoop.log.dir}/${hadoop.security.log.file}
|
||||
log4j.appender.DRFAS.layout=org.apache.log4j.PatternLayout
|
||||
log4j.appender.DRFAS.layout.ConversionPattern=%d{ISO8601} %p %c: %m%n
|
||||
log4j.appender.DRFAS.DatePattern=.yyyy-MM-dd
|
||||
|
||||
#
|
||||
# hadoop configuration logging
|
||||
#
|
||||
|
||||
# Uncomment the following line to turn off configuration deprecation warnings.
|
||||
# log4j.logger.org.apache.hadoop.conf.Configuration.deprecation=WARN
|
||||
|
||||
#
|
||||
# hdfs audit logging
|
||||
#
|
||||
hdfs.audit.logger=INFO,NullAppender
|
||||
hdfs.audit.log.maxfilesize=256MB
|
||||
hdfs.audit.log.maxbackupindex=20
|
||||
log4j.logger.org.apache.hadoop.hdfs.server.namenode.FSNamesystem.audit=${hdfs.audit.logger}
|
||||
log4j.additivity.org.apache.hadoop.hdfs.server.namenode.FSNamesystem.audit=false
|
||||
log4j.appender.RFAAUDIT=org.apache.log4j.RollingFileAppender
|
||||
log4j.appender.RFAAUDIT.File=${hadoop.log.dir}/hdfs-audit.log
|
||||
log4j.appender.RFAAUDIT.layout=org.apache.log4j.PatternLayout
|
||||
log4j.appender.RFAAUDIT.layout.ConversionPattern=%d{ISO8601} %p %c{2}: %m%n
|
||||
log4j.appender.RFAAUDIT.MaxFileSize=${hdfs.audit.log.maxfilesize}
|
||||
log4j.appender.RFAAUDIT.MaxBackupIndex=${hdfs.audit.log.maxbackupindex}
|
||||
|
||||
#
|
||||
# mapred audit logging
|
||||
#
|
||||
mapred.audit.logger=INFO,NullAppender
|
||||
mapred.audit.log.maxfilesize=256MB
|
||||
mapred.audit.log.maxbackupindex=20
|
||||
log4j.logger.org.apache.hadoop.mapred.AuditLogger=${mapred.audit.logger}
|
||||
log4j.additivity.org.apache.hadoop.mapred.AuditLogger=false
|
||||
log4j.appender.MRAUDIT=org.apache.log4j.RollingFileAppender
|
||||
log4j.appender.MRAUDIT.File=${hadoop.log.dir}/mapred-audit.log
|
||||
log4j.appender.MRAUDIT.layout=org.apache.log4j.PatternLayout
|
||||
log4j.appender.MRAUDIT.layout.ConversionPattern=%d{ISO8601} %p %c{2}: %m%n
|
||||
log4j.appender.MRAUDIT.MaxFileSize=${mapred.audit.log.maxfilesize}
|
||||
log4j.appender.MRAUDIT.MaxBackupIndex=${mapred.audit.log.maxbackupindex}
|
||||
|
||||
# Custom Logging levels
|
||||
|
||||
#log4j.logger.org.apache.hadoop.mapred.JobTracker=DEBUG
|
||||
#log4j.logger.org.apache.hadoop.mapred.TaskTracker=DEBUG
|
||||
#log4j.logger.org.apache.hadoop.hdfs.server.namenode.FSNamesystem.audit=DEBUG
|
||||
|
||||
# Jets3t library
|
||||
log4j.logger.org.jets3t.service.impl.rest.httpclient.RestS3Service=ERROR
|
||||
|
||||
# AWS SDK & S3A FileSystem
|
||||
log4j.logger.com.amazonaws=ERROR
|
||||
log4j.logger.com.amazonaws.http.AmazonHttpClient=ERROR
|
||||
log4j.logger.org.apache.hadoop.fs.s3a.S3AFileSystem=WARN
|
||||
|
||||
#
|
||||
# Event Counter Appender
|
||||
# Sends counts of logging messages at different severity levels to Hadoop Metrics.
|
||||
#
|
||||
log4j.appender.EventCounter=org.apache.hadoop.log.metrics.EventCounter
|
||||
|
||||
#
|
||||
# Job Summary Appender
|
||||
#
|
||||
# Use following logger to send summary to separate file defined by
|
||||
# hadoop.mapreduce.jobsummary.log.file :
|
||||
# hadoop.mapreduce.jobsummary.logger=INFO,JSA
|
||||
#
|
||||
hadoop.mapreduce.jobsummary.logger=${hadoop.root.logger}
|
||||
hadoop.mapreduce.jobsummary.log.file=hadoop-mapreduce.jobsummary.log
|
||||
hadoop.mapreduce.jobsummary.log.maxfilesize=256MB
|
||||
hadoop.mapreduce.jobsummary.log.maxbackupindex=20
|
||||
log4j.appender.JSA=org.apache.log4j.RollingFileAppender
|
||||
log4j.appender.JSA.File=${hadoop.log.dir}/${hadoop.mapreduce.jobsummary.log.file}
|
||||
log4j.appender.JSA.MaxFileSize=${hadoop.mapreduce.jobsummary.log.maxfilesize}
|
||||
log4j.appender.JSA.MaxBackupIndex=${hadoop.mapreduce.jobsummary.log.maxbackupindex}
|
||||
log4j.appender.JSA.layout=org.apache.log4j.PatternLayout
|
||||
log4j.appender.JSA.layout.ConversionPattern=%d{yy/MM/dd HH:mm:ss} %p %c{2}: %m%n
|
||||
log4j.logger.org.apache.hadoop.mapred.JobInProgress$JobSummary=${hadoop.mapreduce.jobsummary.logger}
|
||||
log4j.additivity.org.apache.hadoop.mapred.JobInProgress$JobSummary=false
|
||||
|
||||
#
|
||||
# Yarn ResourceManager Application Summary Log
|
||||
#
|
||||
# Set the ResourceManager summary log filename
|
||||
yarn.server.resourcemanager.appsummary.log.file=rm-appsummary.log
|
||||
# Set the ResourceManager summary log level and appender
|
||||
yarn.server.resourcemanager.appsummary.logger=${hadoop.root.logger}
|
||||
#yarn.server.resourcemanager.appsummary.logger=INFO,RMSUMMARY
|
||||
|
||||
# To enable AppSummaryLogging for the RM,
|
||||
# set yarn.server.resourcemanager.appsummary.logger to
|
||||
# <LEVEL>,RMSUMMARY in hadoop-env.sh
|
||||
|
||||
# Appender for ResourceManager Application Summary Log
|
||||
# Requires the following properties to be set
|
||||
# - hadoop.log.dir (Hadoop Log directory)
|
||||
# - yarn.server.resourcemanager.appsummary.log.file (resource manager app summary log filename)
|
||||
# - yarn.server.resourcemanager.appsummary.logger (resource manager app summary log level and appender)
|
||||
|
||||
log4j.logger.org.apache.hadoop.yarn.server.resourcemanager.RMAppManager$ApplicationSummary=${yarn.server.resourcemanager.appsummary.logger}
|
||||
log4j.additivity.org.apache.hadoop.yarn.server.resourcemanager.RMAppManager$ApplicationSummary=false
|
||||
log4j.appender.RMSUMMARY=org.apache.log4j.RollingFileAppender
|
||||
log4j.appender.RMSUMMARY.File=${hadoop.log.dir}/${yarn.server.resourcemanager.appsummary.log.file}
|
||||
log4j.appender.RMSUMMARY.MaxFileSize=256MB
|
||||
log4j.appender.RMSUMMARY.MaxBackupIndex=20
|
||||
log4j.appender.RMSUMMARY.layout=org.apache.log4j.PatternLayout
|
||||
log4j.appender.RMSUMMARY.layout.ConversionPattern=%d{ISO8601} %p %c{2}: %m%n
|
||||
|
||||
# HS audit log configs
|
||||
#mapreduce.hs.audit.logger=INFO,HSAUDIT
|
||||
#log4j.logger.org.apache.hadoop.mapreduce.v2.hs.HSAuditLogger=${mapreduce.hs.audit.logger}
|
||||
#log4j.additivity.org.apache.hadoop.mapreduce.v2.hs.HSAuditLogger=false
|
||||
#log4j.appender.HSAUDIT=org.apache.log4j.DailyRollingFileAppender
|
||||
#log4j.appender.HSAUDIT.File=${hadoop.log.dir}/hs-audit.log
|
||||
#log4j.appender.HSAUDIT.layout=org.apache.log4j.PatternLayout
|
||||
#log4j.appender.HSAUDIT.layout.ConversionPattern=%d{ISO8601} %p %c{2}: %m%n
|
||||
#log4j.appender.HSAUDIT.DatePattern=.yyyy-MM-dd
|
||||
|
||||
# Http Server Request Logs
|
||||
#log4j.logger.http.requests.namenode=INFO,namenoderequestlog
|
||||
#log4j.appender.namenoderequestlog=org.apache.hadoop.http.HttpRequestLogAppender
|
||||
#log4j.appender.namenoderequestlog.Filename=${hadoop.log.dir}/jetty-namenode-yyyy_mm_dd.log
|
||||
#log4j.appender.namenoderequestlog.RetainDays=3
|
||||
|
||||
#log4j.logger.http.requests.datanode=INFO,datanoderequestlog
|
||||
#log4j.appender.datanoderequestlog=org.apache.hadoop.http.HttpRequestLogAppender
|
||||
#log4j.appender.datanoderequestlog.Filename=${hadoop.log.dir}/jetty-datanode-yyyy_mm_dd.log
|
||||
#log4j.appender.datanoderequestlog.RetainDays=3
|
||||
|
||||
#log4j.logger.http.requests.resourcemanager=INFO,resourcemanagerrequestlog
|
||||
#log4j.appender.resourcemanagerrequestlog=org.apache.hadoop.http.HttpRequestLogAppender
|
||||
#log4j.appender.resourcemanagerrequestlog.Filename=${hadoop.log.dir}/jetty-resourcemanager-yyyy_mm_dd.log
|
||||
#log4j.appender.resourcemanagerrequestlog.RetainDays=3
|
||||
|
||||
#log4j.logger.http.requests.jobhistory=INFO,jobhistoryrequestlog
|
||||
#log4j.appender.jobhistoryrequestlog=org.apache.hadoop.http.HttpRequestLogAppender
|
||||
#log4j.appender.jobhistoryrequestlog.Filename=${hadoop.log.dir}/jetty-jobhistory-yyyy_mm_dd.log
|
||||
#log4j.appender.jobhistoryrequestlog.RetainDays=3
|
||||
|
||||
#log4j.logger.http.requests.nodemanager=INFO,nodemanagerrequestlog
|
||||
#log4j.appender.nodemanagerrequestlog=org.apache.hadoop.http.HttpRequestLogAppender
|
||||
#log4j.appender.nodemanagerrequestlog.Filename=${hadoop.log.dir}/jetty-nodemanager-yyyy_mm_dd.log
|
||||
#log4j.appender.nodemanagerrequestlog.RetainDays=3
|
||||
|
||||
|
||||
# WebHdfs request log on datanodes
|
||||
# Specify -Ddatanode.webhdfs.logger=INFO,HTTPDRFA on datanode startup to
|
||||
# direct the log to a separate file.
|
||||
#datanode.webhdfs.logger=INFO,console
|
||||
#log4j.logger.datanode.webhdfs=${datanode.webhdfs.logger}
|
||||
#log4j.appender.HTTPDRFA=org.apache.log4j.DailyRollingFileAppender
|
||||
#log4j.appender.HTTPDRFA.File=${hadoop.log.dir}/hadoop-datanode-webhdfs.log
|
||||
#log4j.appender.HTTPDRFA.layout=org.apache.log4j.PatternLayout
|
||||
#log4j.appender.HTTPDRFA.layout.ConversionPattern=%d{ISO8601} %m%n
|
||||
#log4j.appender.HTTPDRFA.DatePattern=.yyyy-MM-dd
|
||||
|
||||
#
|
||||
# Fair scheduler state dump
|
||||
#
|
||||
# Use following logger to dump the state to a separate file
|
||||
|
||||
#log4j.logger.org.apache.hadoop.yarn.server.resourcemanager.scheduler.fair.FairScheduler.statedump=DEBUG,FSSTATEDUMP
|
||||
#log4j.additivity.org.apache.hadoop.yarn.server.resourcemanager.scheduler.fair.FairScheduler.statedump=false
|
||||
#log4j.appender.FSSTATEDUMP=org.apache.log4j.RollingFileAppender
|
||||
#log4j.appender.FSSTATEDUMP.File=${hadoop.log.dir}/fairscheduler-statedump.log
|
||||
#log4j.appender.FSSTATEDUMP.layout=org.apache.log4j.PatternLayout
|
||||
#log4j.appender.FSSTATEDUMP.layout.ConversionPattern=%d{ISO8601} %p %c: %m%n
|
||||
#log4j.appender.FSSTATEDUMP.MaxFileSize=${hadoop.log.maxfilesize}
|
||||
#log4j.appender.FSSTATEDUMP.MaxBackupIndex=${hadoop.log.maxbackupindex}
|
@ -0,0 +1,12 @@
|
||||
package com.henry.realprocess.bean
|
||||
|
||||
/**
|
||||
* @Author: Henry
|
||||
* @Description:
|
||||
* @Date: Create in 2019/10/20 15:58
|
||||
**/
|
||||
case class Message (
|
||||
var clickLog:ClickLog,
|
||||
var count:Long,
|
||||
var timeStamp:Long
|
||||
)
|
@ -0,0 +1,80 @@
|
||||
package com.henry.realprocess.task
|
||||
|
||||
import com.henry.realprocess.bean.ClickLogWide
|
||||
import com.henry.realprocess.task.ChannelBrowserTask.pvColName
|
||||
import org.apache.commons.lang.StringUtils
|
||||
import org.apache.flink.streaming.api.scala.{DataStream, KeyedStream, WindowedStream}
|
||||
import org.apache.flink.streaming.api.windowing.time.Time
|
||||
import org.apache.flink.streaming.api.windowing.windows.TimeWindow
|
||||
|
||||
/**
|
||||
* @Author: Henry
|
||||
* @Description:
|
||||
* @Date: Create in 2019/11/3 10:42
|
||||
**/
|
||||
|
||||
trait BaseTask[T] {
|
||||
|
||||
|
||||
// 1. Transform
|
||||
def map(clickLogWideDataStream : DataStream[ClickLogWide]): DataStream[T]
|
||||
|
||||
// 2. Key by (group)
|
||||
def keyBy(mapDataStream : DataStream[T]): KeyedStream[T, String]
|
||||
|
||||
// 3. Time window
|
||||
def timeWindow(keyedStream: KeyedStream[T, String]) : WindowedStream[T, String, TimeWindow] = {
|
||||
// All subclasses use a 3-second time window
|
||||
keyedStream.timeWindow(Time.seconds(3))
|
||||
}
|
||||
|
||||
// 4. Aggregate (reduce)
|
||||
def reduce(windowedStream : WindowedStream[T, String, TimeWindow]) : DataStream[T]
|
||||
|
||||
// 5. Sink to HBase
|
||||
def sink2HBase(reduceDataStream: DataStream[T])
|
||||
|
||||
|
||||
// Template method: defines the execution order of the steps above
|
||||
def process(clickLogWideDataStream : DataStream[ClickLogWide]): Unit = {
|
||||
val mapDataStream: DataStream[T] = map(clickLogWideDataStream)
|
||||
val keyedStream: KeyedStream[T, String] = keyBy(mapDataStream)
|
||||
val windowedStream: WindowedStream[T, String, TimeWindow] = timeWindow(keyedStream)
|
||||
val reduceStream: DataStream[T] = reduce(windowedStream)
|
||||
sink2HBase(reduceStream)
|
||||
}
|
||||
|
||||
// Flag an existing (non-new) user's first visit of the day
|
||||
val isOld = (isNew: Int, isDateNew: Int) => if (isNew == 0 && isDateNew == 1) 1 else 0
|
||||
|
||||
// HBase-related column names
|
||||
var tableName = ""
|
||||
var clfName = "info"
|
||||
var rowkey = ""
|
||||
var channelIdColName = "channelID"
|
||||
var browserColName = "browser"
|
||||
var dateColName = "date"
|
||||
var pvColName = "pv"
|
||||
var uvColName = "uv"
|
||||
var newCountColName = "newCount"
|
||||
var oldCountColName = "oldCount"
|
||||
|
||||
|
||||
/* Accumulate the value of the given column
* @param resultMap map of existing column values
* @param column column to look up
* @param currentValue current value
* @return the accumulated value
*/
|
||||
def getTotal(resultMap: Map[String, String],column:String,currentValue:Long):Long={
|
||||
|
||||
var total = currentValue
|
||||
// If resultMap is not null and a value can be read for the column, add it to the current value
|
||||
if (resultMap != null && StringUtils.isNotBlank(resultMap.getOrElse(column,""))) {
|
||||
total = resultMap(column).toLong + currentValue
|
||||
}
|
||||
total
|
||||
}
|
||||
|
||||
|
||||
}
|
@ -0,0 +1,7 @@
|
||||
// Read Kafka consumer settings from `config` (assumed to be a loaded Typesafe Config instance)
val bootstrapServers = config.getString("bootstrap.servers")
val zookeeperConnect = config.getString("zookeeper.connect")
val inputTopic = config.getString("input.topic")
val groupId = config.getString("gruop.id") // key spelling kept as it appears in the original config
val enableAutoCommit = config.getString("enable.auto.commit")
val autoCommitIntervalMs = config.getString("auto.commit.interval.ms")
val autoOffsetReset = config.getString("auto.offset.reset")
|
@ -0,0 +1,18 @@
|
||||
package com.henry.report;
|
||||
|
||||
import org.springframework.boot.autoconfigure.SpringBootApplication;
|
||||
import org.springframework.boot.SpringApplication;
|
||||
/**
|
||||
* @Author: HongZhen
|
||||
* @Description:
|
||||
* @Date: Create in 2019/9/20 11:10
|
||||
**/
|
||||
|
||||
// The @SpringBootApplication annotation marks this class as the application entry point
|
||||
@SpringBootApplication
|
||||
public class ReportApplication {
|
||||
|
||||
public static void main(String[] args) {
|
||||
SpringApplication.run(ReportApplication.class, args);
|
||||
}
|
||||
}
|
@ -0,0 +1,136 @@
|
||||
package com.henry.report.bean;
|
||||
|
||||
/**
|
||||
* @Author: Henry
|
||||
* @Description: Click-stream log entry
|
||||
* @Date: Create in 2019/10/13 19:33
|
||||
**/
|
||||
|
||||
public class Clicklog {
|
||||
|
||||
// Channel ID
private long channelID;
// Product category ID
private long categoryID;
// Product ID
private long produceID;
// User ID
private long userID;

// Country
private String country;
// Province
private String province;
// City
private String city;

// Network type
private String network;
// Traffic source
private String source;

// Browser type
private String browserType;

// Time of entering the site
private Long entryTime;
// Time of leaving the site
private long leaveTime;
|
||||
|
||||
public long getChannelID() {
|
||||
return channelID;
|
||||
}
|
||||
|
||||
public void setChannelID(long channelID) {
|
||||
this.channelID = channelID;
|
||||
}
|
||||
|
||||
public long getCategoryID() {
|
||||
return categoryID;
|
||||
}
|
||||
|
||||
public void setCategoryID(long categoryID) {
|
||||
this.categoryID = categoryID;
|
||||
}
|
||||
|
||||
public long getProduceID() {
|
||||
return produceID;
|
||||
}
|
||||
|
||||
public void setProduceID(long produceID) {
|
||||
this.produceID = produceID;
|
||||
}
|
||||
|
||||
public long getUserID() {
|
||||
return userID;
|
||||
}
|
||||
|
||||
public void setUserID(long userID) {
|
||||
this.userID = userID;
|
||||
}
|
||||
|
||||
public String getCountry() {
|
||||
return country;
|
||||
}
|
||||
|
||||
public void setCountry(String country) {
|
||||
this.country = country;
|
||||
}
|
||||
|
||||
public String getProvince() {
|
||||
return province;
|
||||
}
|
||||
|
||||
public void setProvince(String province) {
|
||||
this.province = province;
|
||||
}
|
||||
|
||||
public String getCity() {
|
||||
return city;
|
||||
}
|
||||
|
||||
public void setCity(String city) {
|
||||
this.city = city;
|
||||
}
|
||||
|
||||
public String getNetwork() {
|
||||
return network;
|
||||
}
|
||||
|
||||
public void setNetwork(String network) {
|
||||
this.network = network;
|
||||
}
|
||||
|
||||
public String getSource() {
|
||||
return source;
|
||||
}
|
||||
|
||||
public void setSource(String source) {
|
||||
this.source = source;
|
||||
}
|
||||
|
||||
public String getBrowserType() {
|
||||
return browserType;
|
||||
}
|
||||
|
||||
public void setBrowserType(String browserType) {
|
||||
this.browserType = browserType;
|
||||
}
|
||||
|
||||
public Long getEntryTime() {
|
||||
return entryTime;
|
||||
}
|
||||
|
||||
public void setEntryTime(Long entryTime) {
|
||||
this.entryTime = entryTime;
|
||||
}
|
||||
|
||||
public long getLeaveTime() {
|
||||
return leaveTime;
|
||||
}
|
||||
|
||||
public void setLeaveTime(long leaveTime) {
|
||||
this.leaveTime = leaveTime;
|
||||
}
|
||||
|
||||
}
|
@ -0,0 +1,51 @@
|
||||
package com.henry.report.bean;
|
||||
|
||||
/**
|
||||
* @Author: Henry
|
||||
* @Description: Message wrapper entity
|
||||
* @Date: Create in 2019/10/11 23:40
|
||||
**/
|
||||
public class Message {
|
||||
|
||||
// Message count
private int count;

// Message timestamp
private long timestamp;

// Message body
private String message;
|
||||
|
||||
public int getCount() {
|
||||
return count;
|
||||
}
|
||||
|
||||
public void setCount(int count) {
|
||||
this.count = count;
|
||||
}
|
||||
|
||||
public long getTimestamp() {
|
||||
return timestamp;
|
||||
}
|
||||
|
||||
public void setTimestamp(long timestamp) {
|
||||
this.timestamp = timestamp;
|
||||
}
|
||||
|
||||
public String getMessage() {
|
||||
return message;
|
||||
}
|
||||
|
||||
public void setMessage(String message) {
|
||||
this.message = message;
|
||||
}
|
||||
|
||||
@Override
|
||||
public String toString() {
|
||||
return "Message{" +
|
||||
"count=" + count +
|
||||
", timestamp=" + timestamp +
|
||||
", message='" + message + '\'' +
|
||||
'}';
|
||||
}
|
||||
}
|
@ -0,0 +1,139 @@
|
||||
package com.henry.report.util;
|
||||
|
||||
import com.alibaba.fastjson.JSONObject;
|
||||
import com.henry.report.bean.Clicklog;
|
||||
import org.apache.http.HttpResponse;
|
||||
import org.apache.http.HttpStatus;
|
||||
import org.apache.http.client.methods.HttpPost;
|
||||
import org.apache.http.entity.StringEntity;
|
||||
import org.apache.http.impl.client.CloseableHttpClient;
|
||||
import org.apache.http.impl.client.HttpClientBuilder;
|
||||
import org.apache.http.util.EntityUtils;
|
||||
|
||||
import java.text.DateFormat;
|
||||
import java.text.ParseException;
|
||||
import java.text.SimpleDateFormat;
|
||||
import java.util.ArrayList;
|
||||
import java.util.Date;
|
||||
import java.util.List;
|
||||
import java.util.Random;
|
||||
|
||||
/**
|
||||
* @Author: Henry
|
||||
* @Description: Click-stream log simulator
|
||||
* @Date: Create in 2019/10/13 20:00
|
||||
**/
|
||||
public class ClickLogGenerator {
|
||||
|
||||
// ID pools
|
||||
private static Long[] channelID = new Long[]{1L, 2L, 3L, 4L, 5L, 6L, 7L, 8L, 9L, 10L, 11L, 12L, 13L, 14L, 15L, 16L, 17L, 18L, 19L, 20L};
|
||||
private static Long[] categoryID = new Long[]{1L, 2L, 3L, 4L, 5L, 6L, 7L, 8L, 9L, 10L, 11L, 12L, 13L, 14L, 15L, 16L, 17L, 18L, 19L, 20L};
|
||||
private static Long[] produceID = new Long[]{1L, 2L, 3L, 4L, 5L, 6L, 7L, 8L, 9L, 10L, 11L, 12L, 13L, 14L, 15L, 16L, 17L, 18L, 19L, 20L};
|
||||
private static Long[] userID = new Long[]{1L, 2L, 3L, 4L, 5L, 6L, 7L, 8L, 9L, 10L, 11L, 12L, 13L, 14L, 15L, 16L, 17L, 18L, 19L, 20L};
|
||||
|
||||
// Regions
private static String[] contrys = new String[]{"china"}; // countries
private static String[] provinces = new String[]{"HeNan", "HeBeijing"}; // provinces
private static String[] citys = new String[]{"ShiJiaZhuang", "ZhengZhou", "LuoyYang"}; // cities
|
||||
|
||||
// Network type
|
||||
private static String[] networks = new String[]{"电信", "移动", "联通"};
|
||||
|
||||
// Traffic source
|
||||
private static String[] sources = new String[]{"直接输入", "百度跳转", "360搜索跳转", "必应跳转"};
|
||||
|
||||
// Browser
|
||||
private static String[] browser = new String[]{"火狐", "QQ浏览器", "360浏览器", "谷歌浏览器"};
|
||||
|
||||
// Pre-generated (entry time, leave time) pairs
|
||||
private static List<Long[]> usertimeLog = producetimes();
|
||||
|
||||
// Build the list of time pairs
|
||||
private static List<Long[]> producetimes() {
|
||||
List<Long[]> usertimelog = new ArrayList<>();
|
||||
for (int i = 0; i < 100; i++) {
|
||||
Long[] timearray = gettimes("2019-10-10 24:60:60:000");
|
||||
usertimelog.add(timearray);
|
||||
}
|
||||
return usertimelog;
|
||||
}
|
||||
|
||||
private static Long[] gettimes(String time) {
|
||||
DateFormat dataFormat = new SimpleDateFormat("yyyy-MM-dd hh:mm:ss:SSS");
|
||||
try {
|
||||
Date date = dataFormat.parse(time);
|
||||
long timetemp = date.getTime();
|
||||
Random random = new Random();
|
||||
int randomint = random.nextInt(10);
|
||||
long starttime = timetemp - randomint*3600*1000;
|
||||
long endtime = starttime + randomint*3600*1000;
|
||||
return new Long[]{starttime,endtime};
|
||||
}catch (ParseException e){
|
||||
e.printStackTrace();
|
||||
}
|
||||
return new Long[]{0L, 0L};
|
||||
}
|
||||
|
||||
// Simulate sending an HTTP request to the reporting service
|
||||
public static void send(String url, String json){
|
||||
try {
|
||||
CloseableHttpClient httpClient = HttpClientBuilder.create().build();
|
||||
HttpPost post = new HttpPost(url);
|
||||
JSONObject response = null ;
|
||||
try {
|
||||
StringEntity s = new StringEntity(json.toString(), "utf-8");
|
||||
s.setContentEncoding("utf-8");
|
||||
// Sending JSON requires setting the contentType
|
||||
s.setContentType("application/json");
|
||||
post.setEntity(s);
|
||||
|
||||
HttpResponse res = httpClient.execute(post);
|
||||
if(res.getStatusLine().getStatusCode() == HttpStatus.SC_OK){
|
||||
// Response body is returned as JSON
|
||||
String result = EntityUtils.toString(res.getEntity());
|
||||
System.out.println(result);
|
||||
}
|
||||
}catch (Exception e){
|
||||
throw new RuntimeException();
|
||||
|
||||
}
|
||||
|
||||
}catch (Exception e){
|
||||
e.printStackTrace();
|
||||
}
|
||||
}
|
||||
|
||||
public static void main(String[] args) {
|
||||
Random random = new Random();
|
||||
for (int i = 0; i < 100; i++) {
|
||||
// channel id, category id, product id, user id, entry time, leave time, region, network, source, browser
|
||||
Clicklog clicklog = new Clicklog();
|
||||
|
||||
clicklog.setChannelID(channelID[random.nextInt(channelID.length)]);
|
||||
clicklog.setCategoryID(categoryID[random.nextInt(categoryID.length)]);
|
||||
clicklog.setProduceID(produceID[random.nextInt(produceID.length)]);
|
||||
clicklog.setUserID(userID[random.nextInt(userID.length)]);
|
||||
clicklog.setCountry(contrys[random.nextInt(contrys.length)]);
|
||||
clicklog.setProvince(provinces[random.nextInt(provinces.length)]);
|
||||
clicklog.setCity(citys[random.nextInt(citys.length)]);
|
||||
clicklog.setNetwork(networks[random.nextInt(networks.length)]);
|
||||
clicklog.setSource(sources[random.nextInt(sources.length)]);
|
||||
clicklog.setBrowserType(browser[random.nextInt(browser.length)]);
|
||||
|
||||
Long[] times = usertimeLog.get(random.nextInt(usertimeLog.size()));
|
||||
clicklog.setEntryTime(times[0]);
|
||||
clicklog.setLeaveTime(times[1]);
|
||||
|
||||
// Serialize the click log to JSON and send it to the reporting endpoint
|
||||
String jsonstr = JSONObject.toJSONString(clicklog);
|
||||
System.out.println(jsonstr);
|
||||
try {
|
||||
Thread.sleep(100);
|
||||
}catch (InterruptedException e){
|
||||
e.printStackTrace();
|
||||
}
|
||||
|
||||
send("http://localhost:1234/receive", jsonstr);
|
||||
}
|
||||
}
|
||||
}
|
@ -0,0 +1,46 @@
|
||||
package com.henry.report.util;
|
||||
|
||||
import org.apache.kafka.clients.producer.Partitioner;
|
||||
import org.apache.kafka.common.Cluster;
|
||||
|
||||
import java.util.Map;
|
||||
import java.util.concurrent.atomic.AtomicInteger;
|
||||
|
||||
/**
|
||||
* @Author: Henry
|
||||
* @Description: Custom round-robin partitioner
|
||||
* @Date: Create in 2019/10/9 23:00
|
||||
**/
|
||||
|
||||
public class RoundRobinPartitioner implements Partitioner {
|
||||
|
||||
// AtomicInteger: thread-safe integer from java.util.concurrent
|
||||
AtomicInteger counter = new AtomicInteger(0) ;
|
||||
|
||||
|
||||
// Returns the partition number: 0, 1, 2, ...
|
||||
@Override
|
||||
public int partition(String topic, Object key, byte[] keyBytes, Object value, byte[] valueBytes, Cluster cluster) {
|
||||
|
||||
// Get the number of partitions for the topic
|
||||
Integer partitions = cluster.partitionCountForTopic(topic) ;
|
||||
|
||||
int curpartition = counter.incrementAndGet() % partitions ; // partition selected by the current round-robin step
|
||||
|
||||
if(counter.get() > 65535){
|
||||
counter.set(0);
|
||||
}
|
||||
|
||||
return curpartition;
|
||||
}
|
||||
|
||||
@Override
|
||||
public void close() {
|
||||
|
||||
}
|
||||
|
||||
@Override
|
||||
public void configure(Map<String, ?> map) {
|
||||
|
||||
}
|
||||
}
|
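The producer-side wiring for this partitioner is not shown in this section; the sketch below is only an illustration, under the assumption that the standard Kafka producer API is used, of registering it via the partitioner.class setting (in this project the registration would normally live in the Spring producer configuration instead).

// Illustrative sketch only: register RoundRobinPartitioner on a plain Kafka producer.
package com.henry.report.util;

import java.util.Properties;
import org.apache.kafka.clients.producer.KafkaProducer;
import org.apache.kafka.clients.producer.ProducerConfig;
import org.apache.kafka.common.serialization.StringSerializer;

public class PartitionerWiringSketch {
    public static KafkaProducer<String, String> buildProducer() {
        Properties props = new Properties();
        props.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, "master:9092,slave1:9092,slave2:9092");
        props.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, StringSerializer.class.getName());
        props.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, StringSerializer.class.getName());
        // Route records across partitions with the custom round-robin partitioner
        props.put(ProducerConfig.PARTITIONER_CLASS_CONFIG, RoundRobinPartitioner.class.getName());
        return new KafkaProducer<>(props);
    }
}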
@ -0,0 +1,18 @@
|
||||
# Change the embedded Tomcat port
server.port=1234

#
# Kafka
#
# Kafka broker addresses
kafka.bootstrap_servers_config=master:9092,slave1:9092,slave2:9092
# Number of retries allowed when a send fails
kafka.retries_config=0
# Maximum batch size per send, in bytes
kafka.batch_size_config=4096
# Linger time: flush after at most 1 ms
kafka.linger_ms_config=1
# Producer buffer size, in bytes
kafka.buffer_memory_config=40960
# Topic name
kafka.topic=pyg
|
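The configuration class that turns these kafka.* properties into a KafkaTemplate is not included in this section. As a rough sketch only (the class name and bean wiring here are illustrative assumptions, not the project's actual code), the binding typically looks like this:

// Illustrative sketch only: bind the kafka.* properties above into a KafkaTemplate.
package com.henry.report.util;

import java.util.HashMap;
import java.util.Map;
import org.apache.kafka.clients.producer.ProducerConfig;
import org.apache.kafka.common.serialization.StringSerializer;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.kafka.core.DefaultKafkaProducerFactory;
import org.springframework.kafka.core.KafkaTemplate;

@Configuration
public class KafkaProducerConfigSketch {

    @Value("${kafka.bootstrap_servers_config}")
    private String bootstrapServers;

    @Value("${kafka.retries_config}")
    private Integer retries;

    @Bean
    public KafkaTemplate<String, String> kafkaTemplate() {
        Map<String, Object> config = new HashMap<>();
        config.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, bootstrapServers);
        config.put(ProducerConfig.RETRIES_CONFIG, retries);
        config.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, StringSerializer.class);
        config.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, StringSerializer.class);
        DefaultKafkaProducerFactory<String, String> factory = new DefaultKafkaProducerFactory<>(config);
        return new KafkaTemplate<>(factory);
    }
}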
@ -0,0 +1,29 @@
|
||||
package com.henry.report;
|
||||
|
||||
import org.junit.Test;
|
||||
import org.junit.runner.RunWith;
|
||||
import org.springframework.beans.factory.annotation.Autowired;
|
||||
import org.springframework.boot.test.context.SpringBootTest;
|
||||
import org.springframework.kafka.core.KafkaTemplate;
|
||||
import org.springframework.test.context.junit4.SpringRunner;
|
||||
|
||||
/**
|
||||
* @Author: Henry
|
||||
* @Description: Kafka producer test
|
||||
* @Date: Create in 2019/10/8 23:26
|
||||
**/
|
||||
|
||||
@RunWith(SpringRunner.class)
|
||||
@SpringBootTest
|
||||
public class KafkaTest {
|
||||
|
||||
@Autowired
|
||||
KafkaTemplate kafkaTemplate;
|
||||
|
||||
@Test
|
||||
public void sendMsg(){
|
||||
for (int i = 0; i < 100; i++)
|
||||
kafkaTemplate.send("test", "key","this is test msg") ;
|
||||
}
|
||||
|
||||
}
|
[48 binary image files added; sizes range from 1.4 KiB to 609 KiB]