Commit 043ec89

add DaPF

1 parent f659e28 commit 043ec89

39 files changed: +3749 -0 lines changed

DaPF/.gitignore

Lines changed: 47 additions & 0 deletions
@@ -0,0 +1,47 @@
### Java template
*.class

# Package Files #
# *.jar
*.war
*.ear

HELP.md
target/
.mvn/wrapper/maven-wrapper.jar
.mvn/

### STS ###
.apt_generated
.classpath
.factorypath
.project
.settings
.springBeans
.sts4-cache

### IntelliJ IDEA ###
.idea
*.iws
*.iml
*.ipr

### NetBeans ###
/nbproject/private/
/nbbuild/
/dist/
/nbdist/
/.nb-gradle/
build/

### VS Code ###
.vscode/


# data
Stream-data/
eventData.txt
*.txt

.DS_Store
*.zip

DaPF/README.md

Lines changed: 19 additions & 0 deletions
@@ -0,0 +1,19 @@
# DaPF
This is the source code of DaPF.
Please use JDK 11.0.2 or a higher version.
The Kafka version is 2.11-1.1.0.

This code is intended for testing only; if it is applied in other scenarios, problems may occur. Please use it with caution!

## directories
1. The Client folder contains the generation, sending, and receiving of subscriptions.
2. The core folder contains static versions of REIN, TAMA, and OpIndex, as well as a DaPF implementation (PhSIH implemented with REIN).
3. The EventSender folder contains the sending of events.

## usage
Modify pom.xml and use Maven to build your target jar application. Deploy a Kafka cluster, then configure the config files in the **resource** directory (a hypothetical sketch of one such file follows this README).
1. Run the DaPF module (src/main/java/core/REIN_dynamic.java)
2. Run the subscription module (src/main/java/Client/subProducer.java) to generate subscriptions
3. Run the event module (src/main/java/EventSender/EventProducer.java) to generate events

Then DaPF starts working. To check the consumer-side information, you can build the consumer application (src/main/java/Client/Consumer.java) to verify.
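
As a concrete illustration of the config-file step above, here is a minimal sketch of a match-config properties file. The keys are inferred from what Client/Consumer.java (shown further down in this commit) reads; the file name and all values are hypothetical and must be adapted to your own cluster.

# match-config.properties (hypothetical example; keys inferred from Client/Consumer.java)
# Kafka bootstrap servers
KafkaServer=localhost:9092
# subscription id, also the topic this consumer polls for matched events
clientId=client-0
stockId=1
# comma-separated "attributeId min max" triples, parsed by sendSubscribe
attributes=0 10.0 20.0,1 5.0 15.0,2 0.0 100.0
# optional Kerberos settings; if absent the consumer only prints a warning
security.protocol=SASL_PLAINTEXT
sasl.kerberos.service.name=kafka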

DaPF/pom.xml

Lines changed: 128 additions & 0 deletions
@@ -0,0 +1,128 @@
<?xml version="1.0" encoding="UTF-8"?>
<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
         xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
    <modelVersion>4.0.0</modelVersion>
    <groupId>cn.edu.cn.DaPF</groupId>
    <!-- <artifactId>DaPF-createSub</artifactId>-->
    <!-- <artifactId>DaPF-createEvent</artifactId>-->
    <!-- <artifactId>DaPF-subProducer</artifactId>-->
    <!-- <artifactId>DaPF-eventProducer</artifactId>-->
    <artifactId>DaPF-reinDynamic</artifactId>
    <!-- <artifactId>DaPF-rein</artifactId>-->
    <!-- <artifactId>DaPF-infoFeedBack</artifactId>-->
    <!-- <artifactId>DaPF-original-rein-dyn</artifactId>-->
    <version>0.0.1</version>
    <name>DaPF</name>
    <description>DaPF project for maven</description>

    <properties>
        <java.version>11</java.version>
        <maven.compiler.source>11</maven.compiler.source>
        <maven.compiler.target>11</maven.compiler.target>
    </properties>

    <dependencies>
        <dependency>
            <groupId>commons-lang</groupId>
            <artifactId>commons-lang</artifactId>
            <version>2.6</version>
        </dependency>
        <dependency>
            <groupId>org.influxdb</groupId>
            <artifactId>influxdb-java</artifactId>
            <version>2.15</version>
        </dependency>
        <dependency>
            <groupId>org.apache.kafka</groupId>
            <artifactId>kafka_2.11</artifactId>
            <version>1.1.0</version>
        </dependency>
        <dependency>
            <groupId>org.apache.kafka</groupId>
            <artifactId>kafka-clients</artifactId>
            <version>1.1.0</version>
        </dependency>
        <dependency>
            <groupId>org.apache.kafka</groupId>
            <artifactId>kafka-streams</artifactId>
            <version>1.1.0</version>
        </dependency>
        <dependency>
            <groupId>com.google.code.gson</groupId>
            <artifactId>gson</artifactId>
            <version>2.3.1</version>
        </dependency>
        <dependency>
            <groupId>org.slf4j</groupId>
            <artifactId>slf4j-api</artifactId>
            <version>1.7.21</version>
        </dependency>
        <dependency>
            <groupId>log4j</groupId>
            <artifactId>log4j</artifactId>
            <version>1.2.17</version>
        </dependency>
        <dependency>
            <groupId>org.slf4j</groupId>
            <artifactId>slf4j-log4j12</artifactId>
            <version>1.7.21</version>
        </dependency>
    </dependencies>

    <build>
        <plugins>
            <!-- When packaging the jar, configure the manifest and add the jars under lib/ to the classpath -->
            <plugin>
                <groupId>org.apache.maven.plugins</groupId>
                <artifactId>maven-jar-plugin</artifactId>
                <configuration>
                    <classesDirectory>target/classes/</classesDirectory>
                    <archive>
                        <manifest>
                            <!-- entry point (main class) -->
                            <mainClass>core.DaPF_REIN</mainClass>
                            <!-- <mainClass>EventSender.EventProducer</mainClass>-->
                            <!-- <mainClass>Client.InfoFeedBack</mainClass>-->
                            <!-- <mainClass>Client.SubProducer</mainClass>-->
                            <!-- do not record timestamped snapshot versions in MANIFEST.MF when packaging -->
                            <useUniqueVersions>false</useUniqueVersions>
                            <addClasspath>true</addClasspath>
                            <classpathPrefix>lib/</classpathPrefix>
                        </manifest>
                        <manifestEntries>
                            <Class-Path>.</Class-Path>
                        </manifestEntries>
                    </archive>
                </configuration>
            </plugin>
            <plugin>
                <groupId>org.apache.maven.plugins</groupId>
                <artifactId>maven-dependency-plugin</artifactId>
                <executions>
                    <execution>
                        <id>copy-dependencies</id>
                        <phase>package</phase>
                        <goals>
                            <goal>copy-dependencies</goal>
                        </goals>
                        <configuration>
                            <type>jar</type>
                            <includeTypes>jar</includeTypes>
                            <!-- <useUniqueVersions>false</useUniqueVersions>-->
                            <outputDirectory>
                                ${project.build.directory}/lib
                            </outputDirectory>
                        </configuration>
                    </execution>
                </executions>
            </plugin>

            <plugin>
                <groupId>org.apache.maven.plugins</groupId>
                <artifactId>maven-compiler-plugin</artifactId>
                <version>3.1</version>
            </plugin>
        </plugins>
    </build>

</project>

DaPF/src/.DS_Store

6 KB
Binary file not shown.

DaPF/src/main/.DS_Store

6 KB
Binary file not shown.

DaPF/src/main/java/.DS_Store

6 KB
Binary file not shown.
DaPF/src/main/java/Client/Consumer.java

Lines changed: 194 additions & 0 deletions
@@ -0,0 +1,194 @@
package Client;

import MySerdes.ValueSerde;
import Structure.EventVal;
import Structure.SubscribeVal;
import org.apache.kafka.clients.consumer.ConsumerRecord;
import org.apache.kafka.clients.consumer.ConsumerRecords;
import org.apache.kafka.clients.consumer.KafkaConsumer;
import org.apache.kafka.clients.producer.KafkaProducer;
import org.apache.kafka.clients.producer.ProducerRecord;
import org.apache.kafka.common.errors.WakeupException;
import utils.InfluxdbUtil;

import java.io.*;
import java.util.Arrays;
import java.util.Properties;

public class Consumer {

    public static void main(String[] args) {
        if (args.length < 2) {
            System.out.println("Usage: Consumer -matchconfigfile -influxdbconfigfile ");
            System.exit(1);
        }

        String config_filename = "";
        String influx_filename = "";
        try {
            config_filename = args[0];
            influx_filename = args[1];
        } catch (Throwable e) {
            System.out.println("Usage: Consumer -matchconfigfile -influxdbconfigfile ");
            e.printStackTrace();
            System.exit(1);
        }

        // read the consumer config file
        Properties properties = new Properties();
        try {
            InputStream inputStream = new FileInputStream(new File(config_filename));
            properties.load(inputStream);
        } catch (FileNotFoundException e) {
            System.err.println("properties file open failed!");
            e.printStackTrace();
        } catch (IOException e) {
            System.err.println("properties file read failed");
            e.printStackTrace();
        }
        String KafkaServer = properties.getProperty("KafkaServer");
        String clientId = properties.getProperty("clientId");
        String securityProto = properties.getProperty("security.protocol");
        String saslName = properties.getProperty("sasl.kerberos.service.name");

        // Kafka connection properties
        Properties props = new Properties();
        props.put("bootstrap.servers", KafkaServer);
        props.put("group.id", "Consumer");
        props.put("key.deserializer", "org.apache.kafka.common.serialization.StringDeserializer");
        props.put("value.deserializer", ValueSerde.EventValDeserde.class.getName());
        if (securityProto != null && saslName != null) {
            props.put("security.protocol", securityProto);
            props.put("sasl.kerberos.service.name", saslName);
        } else {
            System.out.println("WARNING: no authentication configuration");
            // System.exit(1);
        }

        // create the consumer and subscribe to this client's topic
        KafkaConsumer<String, EventVal> consumer = new KafkaConsumer<>(props);
        consumer.subscribe(Arrays.asList(clientId));
        try {
            sendSubscribe(properties);
        } catch (Throwable e) {
            System.out.println("Send subscription failed\n");
            System.exit(1);
        }
        InfluxdbUtil influx = InfluxdbUtil.setUp(influx_filename, "client-test");

        //TopicPartition topicPartition = new TopicPartition("", 0);

        // attach shutdown handler to catch control-c
        Runtime.getRuntime().addShutdownHook(new Thread("consumer-shutdown-hook") {
            @Override
            public void run() {
                consumer.wakeup();
                //latch.countDown();
            }
        });

        // BufferedWriter bw = null;
        try {
            // File file = new File("src/main/resources/Stream-data/rcv-time.txt");
            // FileWriter fw = new FileWriter(file, true);
            // bw = new BufferedWriter(fw);

            long tmpArriveTime = 0;
            double aver_delay_time = 0;
            int x = 0;
            // loop to poll events
            while (true) {
                ConsumerRecords<String, EventVal> records = consumer.poll(70);

                /** ATTENTION: not an accurate receive time; for accuracy, JMX should be used **/
                tmpArriveTime = System.currentTimeMillis();
                // traverse all records
                for (ConsumerRecord<String, EventVal> record : records) {
                    // decode to get the EventVal object
                    EventVal eVal = record.value();
                    eVal.EventGetTime = tmpArriveTime;
                    long tmpDelayTime = tmpArriveTime - eVal.EventProduceTime;

                    // save rcv_time
                    String s = eVal.StockId + " "
                            + eVal.EventArriveTime + " "
                            + eVal.EventMatchTime + " "
                            + eVal.EventGetTime + " "
                            + tmpDelayTime;

                    // bw.write(s + "\n");

                    influx.consumerInsert(eVal);
                }
            }
        } catch (WakeupException e) {

        } catch (Throwable e) {
            System.exit(1);
        } finally {
            // try {
            //     bw.close();
            // } catch (Exception e) {
            //     e.printStackTrace();
            // }
            consumer.close();
        }
        System.exit(0);
    }

    // client's subscription-sending function
    public static int sendSubscribe(Properties properties) {
        String securityProto = properties.getProperty("security.protocol");
        String saslName = properties.getProperty("sasl.kerberos.service.name");

        Properties Props = new Properties();
        Props.put("bootstrap.servers", properties.getProperty("KafkaServer"));
        Props.put("key.serializer", "org.apache.kafka.common.serialization.StringSerializer");
        Props.put("value.serializer", ValueSerde.SubValSerde.class.getName());
        if (securityProto != null && saslName != null) {
            Props.put("security.protocol", securityProto);
            Props.put("sasl.kerberos.service.name", saslName);
        } else {
            System.out.println("WARNING: no authentication configuration");
        }

        KafkaProducer<String, SubscribeVal> producer = new KafkaProducer<>(Props);

        String[] attrs = properties.getProperty("attributes").split(",");
        System.out.println(attrs[0]);
        System.out.println(attrs[1]);
        System.out.println(attrs[2]);

        SubscribeVal sVal = new SubscribeVal(attrs.length);
        sVal.SubId = properties.getProperty("clientId");
        // sVal.sub_num_id = properties.getProperty("Client" + "clientId");
        sVal.StockId = Integer.parseInt(properties.getProperty("stockId"));
        for (int j = 0; j < sVal.AttributeNum; j++) {
            String[] nums_str = attrs[j].split(" ");
            sVal.subVals.get(j).attributeId = Integer.parseInt(nums_str[0]);
            sVal.subVals.get(j).min_val = Double.parseDouble(nums_str[1]);
            sVal.subVals.get(j).max_val = Double.parseDouble(nums_str[2]);
        }
        // build the record
        ProducerRecord<String, SubscribeVal> record = new ProducerRecord<>("Sub", sVal);
        // send
        try {
            System.out.println(record);
            producer.send(record).get();
            //System.err.println("Producer Send " + i + " Success!");
            //Thread.sleep(stime[st]);
        } catch (Exception e) {
            e.printStackTrace();
            return 1;
        }

        producer.close();

        return 0;
    }
}
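
For reference, here is a minimal sketch of driving sendSubscribe directly, for example from a test harness, without starting the full polling loop in Consumer.main. The property keys match what sendSubscribe reads above; the class name SubscribeSmokeTest and all values are hypothetical.

package Client;

import java.util.Properties;

// Hypothetical smoke test: builds the same properties that Consumer.main
// loads from the match-config file and registers one subscription.
public class SubscribeSmokeTest {
    public static void main(String[] args) {
        Properties p = new Properties();
        p.put("KafkaServer", "localhost:9092");   // assumed broker address
        p.put("clientId", "client-0");            // subscription id / reply topic
        p.put("stockId", "1");
        // three "attributeId min max" triples, comma-separated
        p.put("attributes", "0 10.0 20.0,1 5.0 15.0,2 0.0 100.0");
        // Kerberos keys omitted: sendSubscribe then only prints a warning
        int rc = Consumer.sendSubscribe(p);
        System.out.println(rc == 0 ? "subscription sent" : "subscription failed");
    }
}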
