fix issue 1920
ray6080 committed Aug 14, 2023
1 parent f4f1a77 commit 52bc97a
Showing 6 changed files with 57 additions and 40 deletions.
18 changes: 16 additions & 2 deletions src/include/storage/copier/column_chunk.h
@@ -163,11 +163,11 @@ class NullColumnChunk : public BoolColumnChunk {
inline void resetNullBuffer() { memset(buffer.get(), 0 /* non null */, numBytes); }

protected:
- uint64_t numBytesForValues(common::offset_t numValues) const {
+ inline uint64_t numBytesForValues(common::offset_t numValues) const {
// 8 values per byte, and we need a buffer size which is a multiple of 8 bytes
return ceil(numValues / 8.0 / 8.0) * 8;
}
- void initialize(common::offset_t numValues) final {
+ inline void initialize(common::offset_t numValues) final {
numBytesPerValue = 0;
numBytes = numBytesForValues(numValues);
// Each byte defaults to 0, indicating everything is non-null
@@ -186,6 +186,20 @@ class FixedListColumnChunk : public ColumnChunk {
void write(const common::Value& fixedListVal, uint64_t posToWrite) final;
};

+ class SerialColumnChunk : public ColumnChunk {
+ public:
+ SerialColumnChunk()
+ : ColumnChunk{common::LogicalType(common::LogicalTypeID::SERIAL),
+ nullptr /* copyDescription */, false /* hasNullChunk */} {}
+
+ inline void initialize(common::offset_t numValues) final {
+ numBytesPerValue = 0;
+ numBytes = 0;
+ // A SERIAL column stores no data, so the chunk gets an empty buffer.
+ buffer = std::make_unique<uint8_t[]>(numBytes);
+ }
+ };

struct ColumnChunkFactory {
static std::unique_ptr<ColumnChunk> createColumnChunk(
const common::LogicalType& dataType, common::CopyDescription* copyDescription = nullptr);
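Note (illustrative, not part of the commit): the sizing rule behind numBytesForValues above is one null bit per value, rounded up to a whole number of 8-byte words. A standalone check of that arithmetic; the free-function name and the asserts are made up for this sketch:

    #include <cassert>
    #include <cmath>
    #include <cstdint>

    uint64_t numBytesForNullBits(uint64_t numValues) {
        // One null bit per value; round the byte count up to a multiple of 8 bytes.
        return static_cast<uint64_t>(std::ceil(numValues / 8.0 / 8.0)) * 8;
    }

    int main() {
        assert(numBytesForNullBits(64) == 8);     // 64 bits fit exactly in 8 bytes
        assert(numBytesForNullBits(100) == 16);   // 12.5 bytes, rounded up to 16
        assert(numBytesForNullBits(2048) == 256); // already a multiple of 8 bytes
    }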
30 changes: 17 additions & 13 deletions src/storage/copier/column_chunk.cpp
@@ -436,29 +436,33 @@ std::unique_ptr<ColumnChunk> ColumnChunkFactory::createColumnChunk(
const LogicalType& dataType, CopyDescription* copyDescription) {
std::unique_ptr<ColumnChunk> chunk;
switch (dataType.getPhysicalType()) {
- case PhysicalTypeID::BOOL:
+ case PhysicalTypeID::BOOL: {
chunk = std::make_unique<BoolColumnChunk>(copyDescription);
- break;
+ } break;
case PhysicalTypeID::INT64:
case PhysicalTypeID::INT32:
case PhysicalTypeID::INT16:
case PhysicalTypeID::DOUBLE:
case PhysicalTypeID::FLOAT:
- case PhysicalTypeID::INTERVAL:
- chunk = std::make_unique<ColumnChunk>(dataType, copyDescription);
- break;
- case PhysicalTypeID::FIXED_LIST:
+ case PhysicalTypeID::INTERVAL: {
+ if (dataType.getLogicalTypeID() == LogicalTypeID::SERIAL) {
+ chunk = std::make_unique<SerialColumnChunk>();
+ } else {
+ chunk = std::make_unique<ColumnChunk>(dataType, copyDescription);
+ }
+ } break;
+ case PhysicalTypeID::FIXED_LIST: {
chunk = std::make_unique<FixedListColumnChunk>(dataType, copyDescription);
- break;
- case PhysicalTypeID::STRING:
+ } break;
+ case PhysicalTypeID::STRING: {
chunk = std::make_unique<StringColumnChunk>(dataType, copyDescription);
- break;
- case PhysicalTypeID::VAR_LIST:
+ } break;
+ case PhysicalTypeID::VAR_LIST: {
chunk = std::make_unique<VarListColumnChunk>(dataType, copyDescription);
- break;
- case PhysicalTypeID::STRUCT:
+ } break;
+ case PhysicalTypeID::STRUCT: {
chunk = std::make_unique<StructColumnChunk>(dataType, copyDescription);
- break;
+ } break;
default: {
throw NotImplementedException("ColumnChunkFactory::createColumnChunk for data type " +
LogicalTypeUtils::dataTypeToString(dataType) +
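Note (illustrative, not part of the commit): the new branch is needed because SERIAL falls through one of the plain numeric physical-type cases, so the factory must inspect the logical type before picking a chunk class. A minimal standalone sketch of that dispatch, using simplified stand-in types rather than kuzu's real classes:

    #include <cassert>
    #include <cstddef>
    #include <memory>

    enum class LogicalTypeID { INT64, SERIAL };

    struct ColumnChunkStub {
        virtual ~ColumnChunkStub() = default;
        // Ordinary fixed-width chunks materialize 8 bytes per value.
        virtual std::size_t numBytesFor(std::size_t numValues) const { return numValues * 8; }
    };

    struct SerialColumnChunkStub : ColumnChunkStub {
        // SERIAL values are generated, so nothing is materialized on disk.
        std::size_t numBytesFor(std::size_t) const override { return 0; }
    };

    std::unique_ptr<ColumnChunkStub> createChunk(LogicalTypeID logicalType) {
        if (logicalType == LogicalTypeID::SERIAL) {
            return std::make_unique<SerialColumnChunkStub>();
        }
        return std::make_unique<ColumnChunkStub>();
    }

    int main() {
        assert(createChunk(LogicalTypeID::SERIAL)->numBytesFor(2048) == 0);
        assert(createChunk(LogicalTypeID::INT64)->numBytesFor(2048) == 2048 * 8);
    }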
2 changes: 1 addition & 1 deletion src/storage/store/node_column.cpp
@@ -482,7 +482,7 @@ void SerialNodeColumn::lookup(

page_idx_t SerialNodeColumn::append(
ColumnChunk* columnChunk, page_idx_t startPageIdx, uint64_t nodeGroupIdx) {
- // DO NOTHING.
+ metadataDA->resize(nodeGroupIdx + 1);
return 0;
}

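Note (illustrative, not part of the commit): SerialNodeColumn::append now grows the metadata disk array even though it writes no data pages, presumably so that later reads indexed by node group find an entry. A stand-in sketch of that invariant; DiskArrayStub and Metadata are hypothetical names, not kuzu's API:

    #include <cassert>
    #include <cstddef>
    #include <cstdint>
    #include <vector>

    struct Metadata { uint64_t pageIdx = 0; };

    struct DiskArrayStub {
        std::vector<Metadata> entries;
        void resize(std::size_t n) { if (n > entries.size()) entries.resize(n); }
        Metadata get(std::size_t nodeGroupIdx) const { return entries.at(nodeGroupIdx); }
    };

    int main() {
        DiskArrayStub metadataDA;
        uint64_t nodeGroupIdx = 0;
        metadataDA.resize(nodeGroupIdx + 1);               // what the fixed append() now does
        assert(metadataDA.get(nodeGroupIdx).pageIdx == 0); // entry exists even with no pages written
    }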
1 change: 1 addition & 0 deletions src/storage/store/node_table.cpp
@@ -98,6 +98,7 @@ void NodeTable::insert(Transaction* transaction, ValueVector* nodeIDVector,
column->write(nodeIDVector, propertyVectors[propertyIDToVectorIdx.at(propertyID)]);
}
}
+ wal->addToUpdatedNodeTables(tableID);
}

void NodeTable::update(
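Note (illustrative, not part of the commit): the added call records the table in the WAL's set of updated node tables so that commit/checkpoint picks up its pending changes. A tiny stand-in of that bookkeeping; WALStub is hypothetical, not kuzu's WAL interface:

    #include <cassert>
    #include <cstdint>
    #include <unordered_set>

    using table_id_t = uint64_t;

    struct WALStub {
        std::unordered_set<table_id_t> updatedNodeTables;
        void addToUpdatedNodeTables(table_id_t tableID) { updatedNodeTables.insert(tableID); }
    };

    int main() {
        WALStub wal;
        wal.addToUpdatedNodeTables(3);
        wal.addToUpdatedNodeTables(3);               // idempotent: a set, not a list
        assert(wal.updatedNodeTables.count(3) == 1);
    }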
8 changes: 3 additions & 5 deletions src/storage/store/string_node_column.cpp
@@ -41,8 +41,7 @@ void StringNodeColumn::scan(transaction::Transaction* transaction, node_group_id
NodeColumn::scan(transaction, nodeGroupIdx, startOffsetInGroup, endOffsetInGroup, resultVector,
offsetInVector);
auto numValuesToRead = endOffsetInGroup - startOffsetInGroup;
- auto overflowPageIdx =
- overflowMetadataDA->get(nodeGroupIdx, TransactionType::READ_ONLY).pageIdx;
+ auto overflowPageIdx = overflowMetadataDA->get(nodeGroupIdx, transaction->getType()).pageIdx;
for (auto i = 0u; i < numValuesToRead; i++) {
auto pos = offsetInVector + i;
if (resultVector->isNull(pos)) {
@@ -82,8 +81,7 @@ void StringNodeColumn::scanInternal(
assert(startNodeOffset % DEFAULT_VECTOR_CAPACITY == 0);
auto nodeGroupIdx = StorageUtils::getNodeGroupIdxFromNodeOffset(startNodeOffset);
NodeColumn::scanInternal(transaction, nodeIDVector, resultVector);
- auto overflowPageIdx =
- overflowMetadataDA->get(nodeGroupIdx, TransactionType::READ_ONLY).pageIdx;
+ auto overflowPageIdx = overflowMetadataDA->get(nodeGroupIdx, transaction->getType()).pageIdx;
for (auto i = 0u; i < nodeIDVector->state->selVector->selectedSize; i++) {
auto pos = nodeIDVector->state->selVector->selectedPositions[i];
if (resultVector->isNull(pos)) {
@@ -100,7 +98,7 @@ void StringNodeColumn::lookupInternal(
auto startNodeOffset = nodeIDVector->readNodeOffset(0);
auto overflowPageIdx = overflowMetadataDA
->get(StorageUtils::getNodeGroupIdxFromNodeOffset(startNodeOffset),
- TransactionType::READ_ONLY)
+ transaction->getType())
.pageIdx;
NodeColumn::lookupInternal(transaction, nodeIDVector, resultVector);
for (auto i = 0u; i < nodeIDVector->state->selVector->selectedSize; i++) {
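Note (illustrative, not part of the commit): all three hunks in this file swap a hard-coded TransactionType::READ_ONLY for the caller's transaction type when reading overflow metadata, presumably so a WRITE transaction sees the overflow page it has just updated rather than the last checkpointed one. A stand-in sketch of that intended behavior, not kuzu's DiskArray implementation:

    #include <cassert>

    enum class TransactionType { READ_ONLY, WRITE };

    struct OverflowMetadata { unsigned pageIdx; };

    struct MetadataDAStub {
        OverflowMetadata checkpointed{10}; // value visible to read-only transactions
        OverflowMetadata uncommitted{42};  // value written by the current write transaction
        OverflowMetadata get(TransactionType type) const {
            return type == TransactionType::WRITE ? uncommitted : checkpointed;
        }
    };

    int main() {
        MetadataDAStub overflowMetadataDA;
        assert(overflowMetadataDA.get(TransactionType::WRITE).pageIdx == 42);
        assert(overflowMetadataDA.get(TransactionType::READ_ONLY).pageIdx == 10);
    }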
38 changes: 19 additions & 19 deletions test/test_files/tck/match/match1.test
@@ -10,17 +10,17 @@
Binder exception: No node table exists in database.

# Matching all nodes
- #-CASE Scenario2
- #-STATEMENT CREATE NODE TABLE A(ID SERIAL, name STRING, PRIMARY KEY(ID));
- #---- ok
- #-STATEMENT CREATE NODE TABLE B(ID SERIAL, name STRING, PRIMARY KEY(ID));
- #---- ok
- #-STATEMENT CREATE (:A), (:B {name: 'b'});
- #---- ok
- #-STATEMENT MATCH (n) RETURN n;
- #---- 2
- #{_ID: 0:0, _LABEL: A, ID: 0}
- #{_ID: 1:0, _LABEL: B, ID: 0, name: b}
+ -CASE Scenario2
+ -STATEMENT CREATE NODE TABLE A(ID SERIAL, name STRING, PRIMARY KEY(ID));
+ ---- ok
+ -STATEMENT CREATE NODE TABLE B(ID SERIAL, name STRING, PRIMARY KEY(ID));
+ ---- ok
+ -STATEMENT CREATE (:A), (:B {name: 'b'});
+ ---- ok
+ -STATEMENT MATCH (n) RETURN n;
+ ---- 2
+ {_ID: 0:0, _LABEL: A, ID: 0}
+ {_ID: 1:0, _LABEL: B, ID: 0, name: b}

# Matching nodes using multiple labels
-CASE Scenario3
@@ -38,14 +38,14 @@ Binder exception: No node table exists in database.
{_ID: 1:0, _LABEL: B, ID: 0}

# Simple node inline property predicate
- #-CASE Scenario4
- #-STATEMENT CREATE NODE TABLE A(ID SERIAL, name STRING, firstName STRING, PRIMARY KEY(ID));
- #---- ok
- #-STATEMENT CREATE (:A {name: 'bar'}), (:A {name: 'monkey'}), (:A {firstName: 'bar'});
- #---- ok
- #-STATEMENT MATCH (n {name: 'bar'}) RETURN n;
- #---- 1
- #{_ID: 0:0, _LABEL: A, ID: 0, name: bar}
+ -CASE Scenario4
+ -STATEMENT CREATE NODE TABLE A(ID SERIAL, name STRING, firstName STRING, PRIMARY KEY(ID));
+ ---- ok
+ -STATEMENT CREATE (:A {name: 'bar'}), (:A {name: 'monkey'}), (:A {firstName: 'bar'});
+ ---- ok
+ -STATEMENT MATCH (n {name: 'bar'}) RETURN n;
+ ---- 1
+ {_ID: 0:0, _LABEL: A, ID: 0, name: bar}


# Use multiple MATCH clauses to do a Cartesian product
