SuroClient4Test.java
@@ -27,15 +27,25 @@
 import java.util.Properties;
 
 public class SuroClient4Test {
-    public static void main(String[] args) throws JsonProcessingException {
+    public static void main(String[] args) throws JsonProcessingException, InterruptedException {
+        // ip num_of_messages message_size sleep num_of_iterations
+        String ip = args[0];
+        int numMessages = Integer.parseInt(args[1]);
+        int messageSize = Integer.parseInt(args[2]);
+        int sleep = Integer.parseInt(args[3]);
+        int numIterations = Integer.parseInt(args[4]);
 
         Properties props = new Properties();
         props.setProperty(ClientConfig.LB_TYPE, "static");
-        props.setProperty(ClientConfig.LB_SERVER, args[0]);
+        props.setProperty(ClientConfig.LB_SERVER, ip);
 
         SuroClient client = new SuroClient(props);
-        byte[] payload = createMessagePayload(Integer.parseInt(args[2]));
-        for (int i = 0; i < Integer.parseInt(args[1]); ++i) {
-            client.send(new Message(i % 2 == 0 ? "request_trace" : "nf_errors_log", payload));
+        byte[] payload = createMessagePayload(messageSize);
+        for (int n = 0; n < numIterations; ++n) {
+            for (int i = 0; i < numMessages; ++i) {
+                client.send(new Message(i % 2 == 0 ? "request_trace" : "nf_errors_log", payload));
+            }
+            Thread.sleep(sleep);
+        }
         client.shutdown();
     }
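The updated test client takes five positional arguments: ip, num_of_messages, message_size, sleep (milliseconds between iterations), and num_of_iterations. A minimal driver sketch, assuming SuroClient4Test is on the classpath (its package is not shown in this diff) and a collector is reachable at 127.0.0.1:7101; the address and argument values here are illustrative:

public class SuroClient4TestDriver {
    public static void main(String[] args) throws Exception {
        // 10 iterations of 100 messages, each 1024 bytes,
        // pausing 1000 ms between iterations.
        SuroClient4Test.main(new String[]{"127.0.0.1:7101", "100", "1024", "1000", "10"});
    }
}

Each iteration sends num_of_messages messages, alternating between the request_trace and nf_errors_log routing keys, then sleeps before the next burst.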

@@ -3,9 +3,11 @@
 import com.netflix.suro.SuroPlugin;
 import com.netflix.suro.sink.kafka.KafkaSink;
 import com.netflix.suro.sink.localfile.LocalFileSink;
+import com.netflix.suro.sink.notice.LogNotice;
 import com.netflix.suro.sink.notice.NoNotice;
 import com.netflix.suro.sink.notice.QueueNotice;
 import com.netflix.suro.sink.notice.SQSNotice;
+import com.netflix.suro.sink.remotefile.HdfsFileSink;
 import com.netflix.suro.sink.remotefile.S3FileSink;
 import com.netflix.suro.sink.remotefile.formatter.DateRegionStackFormatter;
 import com.netflix.suro.sink.remotefile.formatter.SimpleDateFormatter;
@@ -23,6 +25,7 @@ protected void configure() {
         this.addSinkType(KafkaSink.TYPE, KafkaSink.class);
 
         this.addSinkType(S3FileSink.TYPE, S3FileSink.class);
+        this.addSinkType(HdfsFileSink.TYPE, HdfsFileSink.class);
         this.addRemotePrefixFormatterType(DateRegionStackFormatter.TYPE, DateRegionStackFormatter.class);
         this.addRemotePrefixFormatterType(SimpleDateFormatter.TYPE, SimpleDateFormatter.class);
         this.addRemotePrefixFormatterType(StaticPrefixFormatter.TYPE, StaticPrefixFormatter.class);
@@ -32,5 +35,6 @@ protected void configure() {
         this.addNoticeType(NoNotice.TYPE, NoNotice.class);
         this.addNoticeType(QueueNotice.TYPE, QueueNotice.class);
         this.addNoticeType(SQSNotice.TYPE, SQSNotice.class);
+        this.addNoticeType(LogNotice.TYPE, LogNotice.class);
     }
 }

@@ -360,11 +360,15 @@ public String getStat() {
      */
     public void deleteFile(String filePath) {
         int retryCount = 1;
-        while (new File(filePath).exists() && retryCount <= deleteFileRetryCount) {
+        while (retryCount <= deleteFileRetryCount) {
             try {
-                Thread.sleep(1000 * retryCount);
-                writer.getFS().delete(new Path(filePath), false);
-                ++retryCount;
+                if (writer.getFS().exists(new Path(filePath))) {
+                    Thread.sleep(1000 * retryCount);
+                    writer.getFS().delete(new Path(filePath), false);
+                    ++retryCount;
+                } else {
+                    break;
+                }
             } catch (Exception e) {
                 log.warn("Exception while deleting the file: " + e.getMessage(), e);
             }
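The deleteFile change swaps the local java.io.File existence check for one made through the writer's Hadoop FileSystem, so the retry loop also works when the sink's files live on HDFS rather than local disk, and it now breaks out as soon as the file is gone. Note that retryCount only advances when delete() does not throw, so a persistently failing delete keeps retrying. A standalone sketch of the same retry-until-absent pattern; the path, attempt limit, and default Configuration are hypothetical:

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;

public class RetryDeleteSketch {
    public static void main(String[] args) throws Exception {
        FileSystem fs = FileSystem.get(new Configuration());
        Path path = new Path("/tmp/suro/done.file"); // hypothetical path
        // Re-check existence through the FileSystem abstraction rather than
        // java.io.File, and stop retrying once the file is actually gone.
        for (int attempt = 1; attempt <= 3 && fs.exists(path); ++attempt) {
            Thread.sleep(1000L * attempt); // linear backoff, as in the sink
            fs.delete(path, false);        // non-recursive delete
        }
    }
}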

LogNotice.java
@@ -0,0 +1,32 @@
+package com.netflix.suro.sink.notice;
+
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+public class LogNotice implements Notice<String> {
+    public static final String TYPE = "log";
+
+    private static Logger log = LoggerFactory.getLogger(LogNotice.class);
+
+    @Override
+    public void init() {
+
+    }
+
+    @Override
+    public boolean send(String message) {
+        log.info(message);
+
+        return true;
+    }
+
+    @Override
+    public String recv() {
+        return null;
+    }
+
+    @Override
+    public String getStat() {
+        return null;
+    }
+}
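LogNotice is the simplest Notice implementation in this change: send() writes the message at INFO level and always reports success, while recv() and getStat() return null. A minimal usage sketch; the message body is illustrative:

import com.netflix.suro.sink.notice.LogNotice;
import com.netflix.suro.sink.notice.Notice;

public class LogNoticeDemo {
    public static void main(String[] args) {
        Notice<String> notice = new LogNotice();
        notice.init(); // no-op for LogNotice
        // Logs the message via slf4j and returns true unconditionally.
        boolean sent = notice.send("{\"filePath\":\"/tmp/suro/demo\",\"size\":1024}");
        System.out.println("sent=" + sent);
    }
}

In a sink configuration it is selected by the type name "log", per the addNoticeType registration above.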

HdfsFileSink.java
@@ -0,0 +1,78 @@
+package com.netflix.suro.sink.remotefile;
+
+import com.fasterxml.jackson.annotation.JsonCreator;
+import com.fasterxml.jackson.annotation.JsonProperty;
+import com.google.common.base.Preconditions;
+import com.netflix.suro.sink.localfile.FileNameFormatter;
+import com.netflix.suro.sink.localfile.LocalFileSink;
+import com.netflix.suro.sink.notice.Notice;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.FileSystem;
+import org.apache.hadoop.fs.Path;
+import org.codehaus.jettison.json.JSONObject;
+
+import java.util.Properties;
+
+public class HdfsFileSink extends RemoteFileSink {
+    public static final String TYPE = "hdfs";
+
+    private final String directory;
+    private final Notice<String> notice;
+    private final Configuration hadoopConf;
+
+    @JsonCreator
+    public HdfsFileSink(
+            @JsonProperty("localFileSink") LocalFileSink localFileSink,
+            @JsonProperty("directory") String directory,
+            @JsonProperty("concurrentUpload") int concurrentUpload,
+            @JsonProperty("notice") Notice notice,
+            @JsonProperty("prefixFormatter") RemotePrefixFormatter prefixFormatter,
+            @JsonProperty("batchUpload") boolean batchUpload,
+            @JsonProperty("properties") Properties properties
+    ) {
+        super(localFileSink, prefixFormatter, concurrentUpload, batchUpload);
+
+        this.directory = directory;
+        this.notice = notice;
+        hadoopConf = new Configuration();
+        if (properties != null) {
+            for (String propertyName : properties.stringPropertyNames()) {
+                hadoopConf.set(propertyName, properties.getProperty(propertyName));
+            }
+        }
+
+        Preconditions.checkNotNull(directory, "directory is needed");
+    }
+
+    @Override
+    public String recvNotice() {
+        return notice.recv();
+    }
+
+    @Override
+    void initialize() {
+        // do nothing
+    }
+
+    @Override
+    void upload(String localFilePath, String remoteFilePath) throws Exception {
+        Path outFile = new Path(String.format("%s/%s", directory, remoteFilePath));
+        FileSystem fs = outFile.getFileSystem(hadoopConf);
+
+        fs.mkdirs(outFile.getParent());
+        fs.moveFromLocalFile(new Path(localFilePath), outFile);
+    }
+
+    @Override
+    void notify(String filePath, long fileSize) throws Exception {
+        JSONObject jsonMessage = new JSONObject();
+        jsonMessage.put("directory", directory);
+        jsonMessage.put("filePath", filePath);
+        jsonMessage.put("size", fileSize);
+        jsonMessage.put("collector", FileNameFormatter.localHostAddr);
+
+        if (!notice.send(jsonMessage.toString())) {
+            throw new RuntimeException("Notice failed");
+        }
+    }
+}
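upload() resolves the remote path under the configured directory, creates any missing parent directories, and moves the local file into place with FileSystem.moveFromLocalFile. A standalone sketch of that sequence against a plain Hadoop FileSystem; the namenode address and paths are illustrative:

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;

public class HdfsUploadSketch {
    public static void main(String[] args) throws Exception {
        Configuration conf = new Configuration();
        conf.set("fs.defaultFS", "hdfs://namenode:8020"); // illustrative address

        // Same sequence as HdfsFileSink.upload(): resolve the destination,
        // ensure its parent directory exists, then move the local file over.
        Path outFile = new Path(String.format("%s/%s", "/suro/uploaded", "demo/part-0.log"));
        FileSystem fs = outFile.getFileSystem(conf);
        fs.mkdirs(outFile.getParent());
        fs.moveFromLocalFile(new Path("/tmp/suro/part-0.log"), outFile);
    }
}

After a successful upload, notify() publishes a JSON notice carrying the directory, file path, size, and collector host, so downstream consumers can discover completed files.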