Skip to content

Commit

Permalink
format
Browse files Browse the repository at this point in the history
  • Loading branch information
Zrealshadow committed Nov 1, 2022
1 parent 3f8bed8 commit 3b70b48
Show file tree
Hide file tree
Showing 23 changed files with 848 additions and 782 deletions.
30 changes: 17 additions & 13 deletions cool-core/src/main/java/com/nus/cool/core/io/readstore/ChunkRS.java
Original file line number Diff line number Diff line change
Expand Up @@ -16,14 +16,14 @@
* specific language governing permissions and limitations
* under the License.
*/

package com.nus.cool.core.io.readstore;

import com.nus.cool.core.io.Input;
import com.nus.cool.core.schema.ChunkType;
import com.nus.cool.core.schema.FieldType;
import com.nus.cool.core.schema.TableSchema;
import java.nio.ByteBuffer;

import lombok.Getter;

/**
Expand Down Expand Up @@ -51,13 +51,13 @@
public class ChunkRS implements Input {

/**
* number of record in this chunk
* number of record in this chunk.
*/
@Getter
private int records;

/**
* field array in this chunk
* field array in this chunk.
*/
private FieldRS[] fields;

Expand Down Expand Up @@ -97,7 +97,8 @@ public void readFrom(ByteBuffer buffer) {

this.fields = new FieldRS[fields];

MetaUserFieldRS userMetaField = (MetaUserFieldRS) this.metaChunkRS.getMetaField(tableSchema.getUserKeyFieldName());
MetaUserFieldRS userMetaField = (MetaUserFieldRS) this.metaChunkRS.getMetaField(
tableSchema.getUserKeyFieldName());

// initialize UserDataField first; it becomes an argument for invariant fields
DataHashFieldRS userDataField = new DataHashFieldRS();
Expand All @@ -115,26 +116,29 @@ public void readFrom(ByteBuffer buffer) {

if (FieldType.isHashType(fieldType)) {
if (tableSchema.isInvariantField(i)) {
int invariant_idx = tableSchema.getInvariantFieldFlagMap()[i];
int invariantIdx = tableSchema.getInvariantFieldFlagMap()[i];
// invariantIdx != -1;
this.fields[i] = new DataInvariantHashFieldRS(fieldType, invariant_idx, userMetaField, userDataField);
} else
this.fields[i] = new DataInvariantHashFieldRS(
fieldType, invariantIdx, userMetaField, userDataField);
} else {
this.fields[i] = DataHashFieldRS.readFrom(buffer, fieldType);
}
} else {
if (tableSchema.isInvariantField(i)) {
int invariant_idx = tableSchema.getInvariantFieldFlagMap()[i];
this.fields[i] = new DataInvariantRangeFieldRS(fieldType, invariant_idx, userMetaField, userDataField);
} else
int invariantIdx = tableSchema.getInvariantFieldFlagMap()[i];
this.fields[i] = new DataInvariantRangeFieldRS(
fieldType, invariantIdx, userMetaField, userDataField);
} else {
this.fields[i] = DataRangeFieldRS.readFrom(buffer, fieldType);
}
}
}
}

/**
* Get the filed information according to index
*
* Get the field information according to index.
*
* @param i index of field
* @return the FieldRS at index i
*/
public FieldRS getField(int i) {
return this.fields[i];
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -16,6 +16,7 @@
* specific language governing permissions and limitations
* under the License.
*/

package com.nus.cool.core.io.readstore;

import com.nus.cool.core.io.compression.SimpleBitSetCompressor;
Expand All @@ -27,7 +28,7 @@
import java.util.BitSet;

/**
* Cool field read store, both hash field and range field
* Cool field read store, both hash field and range field.
* <p>
* hash field Layout
* -----------------
Expand Down Expand Up @@ -56,17 +57,17 @@ public class CoolFieldRS implements FieldRS {
private int maxKey;

/**
* key vector for hash field, store globalIDs
* key vector for hash field, store globalIDs.
*/
private InputVector keyVec = null;

/**
* value vector for hash field
* value vector for hash field.
*/
private InputVector valueVec = null;

/**
* BitSet array if this field has been pre-calculated
* BitSet array if this field has been pre-calculated.
*/
private BitSet[] bitSets = null;

Expand Down Expand Up @@ -106,6 +107,12 @@ public boolean isSetField() {
return this.bSetField;
}

/**
* IO interface.
*
* @param buffer input
* @param fieldType type of the field to read
*/
public void readFromWithFieldType(ByteBuffer buffer, FieldType fieldType) {
this.fieldType = fieldType;
int bufGet = buffer.get();
Expand All @@ -129,17 +136,15 @@ public void readFromWithFieldType(ByteBuffer buffer, FieldType fieldType) {
if (codec == Codec.PreCAL) {
int values = buffer.get();
this.bitSets = new BitSet[values];
for (int i = 0; i < values; i++) {
this.bitSets[i] = SimpleBitSetCompressor.read(buffer);
}
for (int i = 0; i < values; i++) {
this.bitSets[i] = SimpleBitSetCompressor.read(buffer);
}
} else {
buffer.position(buffer.position() - 1);
this.valueVec = InputVectorFactory.readFrom(buffer);
}
}



// ------ not used; kept for compatibility with new version code
@Override
public int getValueByIndex(int idx) {
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -159,7 +159,7 @@ public synchronized String getFieldMeta(String fieldName) {
if (id < 0 || id >= this.fieldOffsets.length) {
return "";
}

if (this.fields.containsKey(id)) {
return this.fields.get(id).generateJson();
}
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -16,6 +16,7 @@
* specific language governing permissions and limitations
* under the License.
*/

package com.nus.cool.core.io.readstore;

import static com.google.common.base.Preconditions.checkNotNull;
Expand All @@ -36,7 +37,7 @@
* <p>
* cublet layout
* ---------------------------------------------------------------------------
* |data chunk 1 | ... | data chunk n | meta chunk | header | header offset |
* |data chunk 1 | ... | data chunk n | meta chunk | header | header offset |
* ---------------------------------------------------------------------------
* <p>
* header layout
Expand All @@ -50,13 +51,13 @@
public class CubletRS implements Input {

/**
* MetaChunk for this cublet
* MetaChunk for this cublet.
*/
@Getter
private MetaChunkRS metaChunk;

/**
* BitSet list for query result
* BitSet list for query result.
*/
private List<BitSet> bitSets = Lists.newArrayList();

Expand All @@ -77,32 +78,31 @@ public CubletRS(TableSchema schema) {
}

/**
* deserialize a cublet from a byte buffer
* deserialize a cublet from a byte buffer.
*/
@Override
public void readFrom(ByteBuffer buffer) {
// Read header offset
int end = buffer.limit();
this.limit = end;
int headOffset;
buffer.position(end - Ints.BYTES); // one byte to store header offset
buffer.position(end - Ints.BYTES); // the last int stores the header offset
int tag = buffer.getInt();
// if offset is got from last one byte
if (tag != 0) {
headOffset = tag;
}
// if offset is not got from last one byte, read two bytes
else {
} else {
// if the offset is not obtained from the last int, read two more ints
buffer.position(end - Ints.BYTES - Ints.BYTES);
int size = buffer.getInt();
buffer.position(end - Ints.BYTES - Ints.BYTES - Ints.BYTES);
end = buffer.getInt();
buffer.position(end - Ints.BYTES);
headOffset = buffer.getInt();
buffer.position(end);
for (; size > 0; size--) {
this.bitSets.add(SimpleBitSetCompressor.read(buffer));
}
for (; size > 0; size--) {
this.bitSets.add(SimpleBitSetCompressor.read(buffer));
}
}

// Get #chunk and chunk offsets
Expand All @@ -115,13 +115,13 @@ public void readFrom(ByteBuffer buffer) {

// read the metaChunk, which is the last one in #chunks
this.metaChunk = new MetaChunkRS(this.schema);
buffer.position(chunkOffsets[chunks-1]);
buffer.position(chunkOffsets[chunks - 1]);
int chunkHeadOffset = buffer.getInt();
buffer.position(chunkHeadOffset);
this.metaChunk.readFrom(buffer);

// read the dataChunk
for (int i = 0; i < chunks-1; i++) {
for (int i = 0; i < chunks - 1; i++) {
ChunkRS chunk = new ChunkRS(this.schema, this.metaChunk);
buffer.position(chunkOffsets[i]);
chunkHeadOffset = buffer.getInt();
Expand Down
Loading

0 comments on commit 3b70b48

Please sign in to comment.