
refactoring

pull/809/head
Chris Lu 6 years ago
parent commit 319ab6d98f
  1. other/java/hdfs/src/main/java/seaweed/hdfs/ByteBufferOutputStream.java (21 changed lines)
  2. other/java/hdfs/src/main/java/seaweed/hdfs/SeaweedOutputStream.java (4 changed lines)
  3. other/java/hdfs/src/main/java/seaweed/hdfs/SeaweedRead.java (7 changed lines)
  4. other/java/hdfs/src/main/java/seaweed/hdfs/SeaweedWrite.java (7 changed lines)
  5. other/java/hdfs/src/test/java/seaweed/hdfs/SeaweedReadTest.java (3 changed lines)

other/java/hdfs/src/main/java/seaweed/hdfs/ByteBufferOutputStream.java (21 changed lines)

@@ -0,0 +1,21 @@
package seaweed.hdfs;

import java.io.IOException;
import java.io.OutputStream;
import java.nio.ByteBuffer;

public class ByteBufferOutputStream extends OutputStream {

    private final ByteBuffer buf;

    public ByteBufferOutputStream(ByteBuffer buf) {
        this.buf = buf;
    }

    public void write(int b) throws IOException {
        this.buf.put((byte) b);
    }

    public void write(byte[] b, int off, int len) throws IOException {
        this.buf.put(b, off, len);
    }
}

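The new class is a thin OutputStream adapter over a java.nio.ByteBuffer, added so the read path no longer depends on Hadoop's org.apache.hadoop.hdfs.util.ByteBufferOutputStream. A minimal usage sketch follows; the demo class name, buffer size, and byte values are illustrative and not part of the commit:

    package seaweed.hdfs;

    import java.io.IOException;
    import java.nio.ByteBuffer;

    // Hypothetical demo, not part of the commit: stream bytes into a wrapped array.
    public class ByteBufferOutputStreamDemo {
        public static void main(String[] args) throws IOException {
            byte[] backing = new byte[16];
            try (ByteBufferOutputStream out = new ByteBufferOutputStream(ByteBuffer.wrap(backing))) {
                out.write(new byte[]{1, 2, 3}, 0, 3); // delegates to ByteBuffer.put(b, off, len)
                out.write(4);                         // delegates to ByteBuffer.put((byte) b)
            }
            // backing[0..3] now holds 1, 2, 3, 4; writing past the buffer's remaining
            // capacity would surface as a BufferOverflowException from ByteBuffer.put.
            System.out.println(backing[0] + " " + backing[3]); // prints "1 4"
        }
    }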
other/java/hdfs/src/main/java/seaweed/hdfs/SeaweedOutputStream.java (4 changed lines)

@@ -24,6 +24,8 @@ import java.util.concurrent.LinkedBlockingQueue;
import java.util.concurrent.ThreadPoolExecutor;
import java.util.concurrent.TimeUnit;

import static seaweed.hdfs.SeaweedFileSystemStore.getParentDirectory;

public class SeaweedOutputStream extends OutputStream implements Syncable, StreamCapabilities {

    private static final Logger LOG = LoggerFactory.getLogger(SeaweedOutputStream.class);
@@ -79,7 +81,7 @@ public class SeaweedOutputStream extends OutputStream implements Syncable, Strea
        LOG.debug("SeaweedWrite.writeMeta path: {} entry:{}", path, entry);

        try {
            SeaweedWrite.writeMeta(filerGrpcClient, path, entry);
            SeaweedWrite.writeMeta(filerGrpcClient, getParentDirectory(path), entry);
        } catch (Exception ex) {
            throw new IOException(ex);
        }

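The only behavioral change here is that writeMeta is now handed the parent directory string, resolved at the call site via the statically imported SeaweedFileSystemStore.getParentDirectory. That helper's body is not part of this diff; a plausible minimal shape, stated purely as an assumption, would be:

    import org.apache.hadoop.fs.Path;

    // Assumed shape only -- the real helper lives in SeaweedFileSystemStore and is not shown in this commit.
    class ParentDirectorySketch {
        static String getParentDirectory(Path path) {
            // "/" stays "/", otherwise return the parent as a plain string,
            // e.g. "/user/hive" for "/user/hive/warehouse".
            return path.isRoot() ? "/" : path.getParent().toUri().getPath();
        }
    }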
other/java/hdfs/src/main/java/seaweed/hdfs/SeaweedRead.java (7 changed lines)

@@ -1,14 +1,11 @@
package seaweed.hdfs;
import org.apache.hadoop.hdfs.util.ByteBufferOutputStream;
import org.apache.http.HttpEntity;
import org.apache.http.HttpHeaders;
import org.apache.http.HttpResponse;
import org.apache.http.client.HttpClient;
import org.apache.http.client.methods.HttpGet;
import org.apache.http.impl.client.HttpClientBuilder;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import seaweedfs.client.FilerGrpcClient;
import seaweedfs.client.FilerProto;
@@ -23,7 +20,7 @@ import java.util.Map;
public class SeaweedRead {

    private static final Logger LOG = LoggerFactory.getLogger(SeaweedRead.class);
    // private static final Logger LOG = LoggerFactory.getLogger(SeaweedRead.class);

    // returns bytesRead
    public static long read(FilerGrpcClient filerGrpcClient, List<VisibleInterval> visibleIntervals,
@@ -81,7 +78,7 @@ public class SeaweedRead {
            int len = (int) (chunkView.logicOffset - position + chunkView.size);

            OutputStream outputStream = new ByteBufferOutputStream(ByteBuffer.wrap(buffer, startOffset, len));
            entity.writeTo(outputStream);
            LOG.debug("* read chunkView:{} startOffset:{} length:{}", chunkView, startOffset, len);
            // LOG.debug("* read chunkView:{} startOffset:{} length:{}", chunkView, startOffset, len);

            return len;

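The read path wraps the target slice of the caller's byte[] in the new local ByteBufferOutputStream so the HTTP response body lands directly in place. A simplified sketch of that pattern follows; the class and method names, URL handling, and per-call HttpClient are assumptions for illustration, not the commit's actual code:

    package seaweed.hdfs;

    import org.apache.http.HttpEntity;
    import org.apache.http.HttpResponse;
    import org.apache.http.client.HttpClient;
    import org.apache.http.client.methods.HttpGet;
    import org.apache.http.impl.client.HttpClientBuilder;

    import java.io.IOException;
    import java.io.OutputStream;
    import java.nio.ByteBuffer;

    // Hypothetical sketch of the copy-into-buffer pattern used in SeaweedRead.read().
    public class ChunkCopySketch {
        // Copies one chunk's response body into buffer[startOffset .. startOffset + len).
        static long readChunkInto(String chunkUrl, byte[] buffer, int startOffset, int len) throws IOException {
            HttpClient client = HttpClientBuilder.create().build();
            HttpResponse response = client.execute(new HttpGet(chunkUrl));
            HttpEntity entity = response.getEntity();
            // Wrap only the target slice; a body longer than len would overflow the ByteBuffer.
            OutputStream outputStream = new ByteBufferOutputStream(ByteBuffer.wrap(buffer, startOffset, len));
            entity.writeTo(outputStream);
            return len;
        }
    }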
other/java/hdfs/src/main/java/seaweed/hdfs/SeaweedWrite.java (7 changed lines)

@@ -1,6 +1,5 @@
package seaweed.hdfs;
import org.apache.hadoop.fs.Path;
import org.apache.http.HttpResponse;
import org.apache.http.client.methods.HttpPost;
import org.apache.http.entity.mime.HttpMultipartMode;
@@ -14,8 +13,6 @@ import java.io.ByteArrayInputStream;
import java.io.IOException;
import java.io.InputStream;

import static seaweed.hdfs.SeaweedFileSystemStore.getParentDirectory;

public class SeaweedWrite {

    public static void writeData(FilerProto.Entry.Builder entry,
@@ -49,10 +46,10 @@ public class SeaweedWrite {
    }

    public static void writeMeta(final FilerGrpcClient filerGrpcClient,
                                 final Path path, final FilerProto.Entry.Builder entry) {
                                 final String parentDirectory, final FilerProto.Entry.Builder entry) {
        filerGrpcClient.getBlockingStub().createEntry(
                FilerProto.CreateEntryRequest.newBuilder()
                        .setDirectory(getParentDirectory(path))
                        .setDirectory(parentDirectory)
                        .setEntry(entry)
                        .build()
        );

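With the parameter changed from a Hadoop Path to a plain String, SeaweedWrite no longer needs org.apache.hadoop.fs.Path or the getParentDirectory import, and callers resolve the directory themselves. A hedged sketch of the new call shape; the variable names and the empty entry are illustrative, and filerGrpcClient and path are assumed to be in scope as they are in SeaweedOutputStream:

    // Before: SeaweedWrite.writeMeta(filerGrpcClient, path, entry) derived the directory internally.
    // After: the caller passes the parent directory as a String.
    FilerProto.Entry.Builder entry = FilerProto.Entry.newBuilder(); // entry fields elided
    String parentDirectory = getParentDirectory(path);              // statically imported in SeaweedOutputStream
    SeaweedWrite.writeMeta(filerGrpcClient, parentDirectory, entry);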
other/java/hdfs/src/test/java/seaweedfs/hdfs/SeaweedReadTest.java → other/java/hdfs/src/test/java/seaweed/hdfs/SeaweedReadTest.java (3 changed lines)

@@ -1,7 +1,6 @@
package seaweedfs.hdfs;
package seaweed.hdfs;
import org.junit.Test;
import seaweed.hdfs.SeaweedRead;
import seaweedfs.client.FilerProto;
import java.util.ArrayList;