public class DiskBytesWritable
extends java.lang.Object
implements org.apache.hadoop.io.WritableComparable
| Constructor and Description |
| --- |
| DiskBytesWritable(byte[] bytes) |
| Modifier and Type | Method and Description |
| --- | --- |
| void | append(byte[] bytes) |
| int | compareTo(java.lang.Object arg0) |
| byte[] | get() - Get the data from the BytesWritable. |
| int | getCapacity() - Get the capacity, which is the maximum size that could be handled without resizing the backing storage. |
| int | getSize() - Get the current size of the buffer. |
| void | readFields(java.io.DataInput in) |
| void | set(byte[] newData, int offset, int length) - Set the value to a copy of the given byte range. |
| void | set(DiskBytesWritable newData) - Set the BytesWritable to the contents of the given newData. |
| void | setCapacity(int new_cap) - Change the capacity of the backing storage. |
| void | setSize(int size) - Change the size of the buffer. |
| void | write(java.io.DataOutput out) |
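A minimal usage sketch based only on the signatures above. The import for DiskBytesWritable is omitted because this page does not show the class's package, and the relationship between get(), getSize(), and getCapacity() after an append() is assumed to follow the usual BytesWritable conventions.

```java
import java.io.IOException;
import java.nio.charset.StandardCharsets;

// Import for DiskBytesWritable omitted: its package is not shown on this page.
public class DiskBytesWritableSketch {
    public static void main(String[] args) throws IOException {
        // Construct from an initial byte array (the only documented constructor).
        DiskBytesWritable value = new DiskBytesWritable(
                "hello ".getBytes(StandardCharsets.UTF_8));

        // append() and get() both declare IOException, so callers must
        // handle or propagate it.
        value.append("world".getBytes(StandardCharsets.UTF_8));
        byte[] bytes = value.get();

        System.out.println("size = " + value.getSize());
        System.out.println("capacity = " + value.getCapacity());
        System.out.println("returned bytes = " + bytes.length);
    }
}
```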
public void append(byte[] bytes) throws java.io.IOException
Throws:
java.io.IOException

public byte[] get() throws java.io.IOException
Throws:
java.io.IOException

public int getSize()

public void setSize(int size)
Parameters:
size - The new number of bytes

public int getCapacity()

public void setCapacity(int new_cap)
Parameters:
new_cap - The new capacity in bytes.

public void set(DiskBytesWritable newData)
Parameters:
newData - the value to set this BytesWritable to.

public void set(byte[] newData, int offset, int length)
Parameters:
newData - the new values to copy in
offset - the offset in newData to start at
length - the number of bytes to copy

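A short sketch of the two set() overloads documented above; the source data and the offset/length values are arbitrary, and, as before, the import for DiskBytesWritable is omitted because its package is not shown here.

```java
import java.nio.charset.StandardCharsets;

public class SetSketch {
    public static void main(String[] args) {
        byte[] source = "0123456789".getBytes(StandardCharsets.US_ASCII);

        // Replace the current value with a copy of 5 bytes starting at offset 2.
        DiskBytesWritable target = new DiskBytesWritable(new byte[0]);
        target.set(source, 2, 5);

        // Or copy the contents of another DiskBytesWritable.
        DiskBytesWritable other = new DiskBytesWritable(source);
        target.set(other);

        System.out.println("size after set = " + target.getSize());
    }
}
```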
public void readFields(java.io.DataInput in) throws java.io.IOException
Specified by:
readFields in interface org.apache.hadoop.io.Writable
Throws:
java.io.IOException

public void write(java.io.DataOutput out) throws java.io.IOException
Specified by:
write in interface org.apache.hadoop.io.Writable
Throws:
java.io.IOException

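Because the class implements org.apache.hadoop.io.Writable, write() and readFields() can be used for a simple in-memory round trip. The sketch below uses only standard java.io streams and the documented byte-array constructor (no no-argument constructor is listed on this page).

```java
import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.DataInputStream;
import java.io.DataOutputStream;
import java.io.IOException;
import java.nio.charset.StandardCharsets;

public class RoundTripSketch {
    public static void main(String[] args) throws IOException {
        DiskBytesWritable original = new DiskBytesWritable(
                "payload".getBytes(StandardCharsets.UTF_8));

        // Serialize via the Writable contract.
        ByteArrayOutputStream buffer = new ByteArrayOutputStream();
        original.write(new DataOutputStream(buffer));

        // Deserialize into a second instance.
        DiskBytesWritable copy = new DiskBytesWritable(new byte[0]);
        copy.readFields(new DataInputStream(
                new ByteArrayInputStream(buffer.toByteArray())));

        System.out.println("copied size = " + copy.getSize());
    }
}
```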
public int compareTo(java.lang.Object arg0)
Specified by:
compareTo in interface java.lang.Comparable

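compareTo() is declared against the raw Comparable interface, and this page does not document the ordering rule, so the sketch below relies only on the general Comparable contract (a negative, zero, or positive result).

```java
import java.nio.charset.StandardCharsets;

public class CompareSketch {
    public static void main(String[] args) {
        DiskBytesWritable a = new DiskBytesWritable("a".getBytes(StandardCharsets.UTF_8));
        DiskBytesWritable b = new DiskBytesWritable("b".getBytes(StandardCharsets.UTF_8));

        // Only the sign is meaningful; the actual ordering is implementation-defined.
        int result = a.compareTo(b);
        System.out.println("sign of compareTo: " + Integer.signum(result));
    }
}
```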
Copyright © 2012 Bixo Labs