Add bindings for SERIAL and BLOB for client APIs #1773

Merged
2 commits merged on Jul 7, 2023
Changes from 1 commit
6 changes: 4 additions & 2 deletions tools/java_api/src/jni/kuzu_java.cpp
@@ -855,7 +855,8 @@ JNIEXPORT jobject JNICALL Java_com_kuzudb_KuzuNative_kuzu_1value_1get_1value(
jobject ret = env->NewObject(retClass, ctor, val);
return ret;
}
case LogicalTypeID::INT64: {
case LogicalTypeID::INT64:
case LogicalTypeID::SERIAL: {
jclass retClass = env->FindClass("java/lang/Long");
jmethodID ctor = env->GetMethodID(retClass, "<init>", "(J)V");
jlong val = static_cast<jlong>(v->getValue<int64_t>());
@@ -926,7 +927,8 @@ JNIEXPORT jobject JNICALL Java_com_kuzudb_KuzuNative_kuzu_1value_1get_1value(
jobject ret = env->NewObject(retClass, ctor, iid.tableID, iid.offset);
return ret;
}
case LogicalTypeID::STRING: {
case LogicalTypeID::STRING:
case LogicalTypeID::BLOB: {
std::string str = v->getValue<std::string>();
jstring ret = env->NewStringUTF(str.c_str());
return ret;
42 changes: 22 additions & 20 deletions tools/java_api/src/test/java/com/kuzudb/test/TestHelper.java
@@ -25,29 +25,31 @@ public static void loadData(String dbPath) throws IOException, KuzuObjectRefDestroyedException
BufferedReader reader;
db = new KuzuDatabase(dbPath, 0);
conn = new KuzuConnection(db);
try {
reader = new BufferedReader(new FileReader("./../../dataset/tinysnb/schema.cypher"));
String line = reader.readLine();
KuzuQueryResult result;

while (line != null) {
conn.query(line);
line = reader.readLine();
}

reader.close();

reader = new BufferedReader(new FileReader("./../../dataset/tinysnb/copy.cypher"));
reader = new BufferedReader(new FileReader("../../dataset/tinysnb/schema.cypher"));
String line;
do {
line = reader.readLine();
line = line.replace("dataset/tinysnb", "../../dataset/tinysnb");
result = conn.query(line);
result.destroy();
} while (line != null);
reader.close();

while (line != null) {
line = line.replace("dataset/tinysnb", "../../dataset/tinysnb");
conn.query(line);
line = reader.readLine();
}

reader.close();
} catch (IOException e) {
e.printStackTrace();
}
reader = new BufferedReader(new FileReader("../../dataset/tinysnb/copy.cypher"));
do {
line = reader.readLine();
line = line.replace("dataset/tinysnb", "../../dataset/tinysnb");
result = conn.query(line);
result.destroy();
} while (line != null);
reader.close();

result = conn.query("create node table moviesSerial (ID SERIAL, name STRING, length INT32, note STRING, PRIMARY KEY (ID));");
result.destroy();
result = conn.query("copy moviesSerial from \"../../dataset/tinysnb-serial/vMovies.csv\"");
result.destroy();
}
}
18 changes: 18 additions & 0 deletions tools/java_api/src/test/java/com/kuzudb/test/ValueTest.java
@@ -400,6 +400,24 @@ void ValueGetINT64() throws KuzuObjectRefDestroyedException {
result.destroy();
}

@Test
void ValueGetSERIAL() throws KuzuObjectRefDestroyedException {
// SERIAL
KuzuQueryResult result = conn.query("MATCH (a:moviesSerial) WHERE a.ID = 2 RETURN a.ID;");
assertTrue(result.isSuccess());
assertTrue(result.hasNext());
KuzuFlatTuple flatTuple = result.getNext();
KuzuValue value = flatTuple.getValue(0);
assertTrue(value.isOwnedByCPP());
assertFalse(value.isNull());

assertTrue(value.getValue().equals(2L));
value.destroy();
flatTuple.destroy();
result.destroy();
}


@Test
void ValueGetFloat() throws KuzuObjectRefDestroyedException {
// FLOAT
6 changes: 4 additions & 2 deletions tools/nodejs_api/src_cpp/node_util.cpp
@@ -17,7 +17,8 @@ Napi::Value Util::ConvertToNapiObject(const Value& value, Napi::Env env) {
case LogicalTypeID::INT32: {
return Napi::Number::New(env, value.getValue<int32_t>());
}
case LogicalTypeID::INT64: {
case LogicalTypeID::INT64:
case LogicalTypeID::SERIAL: {
return Napi::Number::New(env, value.getValue<int64_t>());
}
case LogicalTypeID::FLOAT: {
@@ -26,7 +27,8 @@ Napi::Value Util::ConvertToNapiObject(const Value& value, Napi::Env env) {
case LogicalTypeID::DOUBLE: {
return Napi::Number::New(env, value.getValue<double>());
}
case LogicalTypeID::STRING: {
case LogicalTypeID::STRING:
case LogicalTypeID::BLOB: {
return Napi::String::New(env, value.getValue<std::string>());
}
case LogicalTypeID::DATE: {
7 changes: 7 additions & 0 deletions tools/nodejs_api/test/common.js
@@ -51,6 +51,13 @@ const initTests = async () => {
await conn.query(statement);
}

await conn.query(
"create node table moviesSerial (ID SERIAL, name STRING, length INT32, note STRING, PRIMARY KEY (ID))"
);
await conn.query(
'copy moviesSerial from "../../dataset/tinysnb-serial/vMovies.csv"'
);

global.dbPath = dbPath;
global.db = db;
global.conn = conn;
9 changes: 7 additions & 2 deletions tools/nodejs_api/test/test_connection.js
@@ -208,8 +208,13 @@ describe("Get node table names", function () {
assert.exists(nodeTableNames);
assert.isArray(nodeTableNames);
nodeTableNames.sort();
assert.equal(nodeTableNames.length, 3);
assert.deepEqual(nodeTableNames, ["movies", "organisation", "person"]);
assert.equal(nodeTableNames.length, 4);
assert.deepEqual(nodeTableNames, [
"movies",
"moviesSerial",
"organisation",
"person",
]);
});
});

14 changes: 14 additions & 0 deletions tools/nodejs_api/test/test_data_type.js
@@ -57,6 +57,20 @@ describe("INT64", function () {
});
});

describe("SERIAL", function () {
it("should convert SERIAL type", async function () {
const queryResult = await conn.query(
"MATCH (a:moviesSerial) WHERE a.ID = 2 RETURN a.ID;"
);
const result = await queryResult.getAll();
assert.equal(result.length, 1);
assert.equal(Object.keys(result[0]).length, 1);
assert.isTrue("a.ID" in result[0]);
assert.equal(typeof result[0]["a.ID"], "number");
assert.equal(result[0]["a.ID"], 2);
});
});

describe("FLOAT", function () {
it("should convert FLOAT type", async function () {
const queryResult = await conn.query(
6 changes: 4 additions & 2 deletions tools/python_api/src_cpp/py_query_result.cpp
@@ -80,7 +80,8 @@ py::object PyQueryResult::convertValueToPyObject(const Value& value) {
case LogicalTypeID::INT32: {
return py::cast(value.getValue<int32_t>());
}
case LogicalTypeID::INT64: {
case LogicalTypeID::INT64:
case LogicalTypeID::SERIAL: {
return py::cast(value.getValue<int64_t>());
}
case LogicalTypeID::FLOAT: {
@@ -89,7 +90,8 @@ py::object PyQueryResult::convertValueToPyObject(const Value& value) {
case LogicalTypeID::DOUBLE: {
return py::cast(value.getValue<double>());
}
case LogicalTypeID::STRING: {
case LogicalTypeID::STRING:
case LogicalTypeID::BLOB:{
return py::cast(value.getValue<std::string>());
}
case LogicalTypeID::DATE: {
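With these two fall-throughs in place, SERIAL values reach Python callers as plain `int`s and BLOB values as `str`s, exactly like INT64 and STRING. A minimal sketch of what that looks like from the Python API, reusing the `moviesSerial` table that this PR loads in `conftest.py` below; the `conn` object and the `has_next`/`get_next` calls are assumed from the existing `kuzu` Python API, not added by this PR:

```python
# Illustrative sketch only; assumes `conn` is a kuzu.Connection to a database
# containing the moviesSerial table created by the test fixtures below.
result = conn.execute("MATCH (a:moviesSerial) WHERE a.ID = 2 RETURN a.ID, a.name;")
while result.has_next():            # existing QueryResult API
    row = result.get_next()
    assert isinstance(row[0], int)  # SERIAL now converts through the INT64 branch
    assert isinstance(row[1], str)  # STRING (and now BLOB) convert through the STRING branch
```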
105 changes: 65 additions & 40 deletions tools/python_api/test/conftest.py
@@ -6,37 +6,35 @@
sys.path.append('../build/')
import kuzu

# Note conftest is the default file name for sharing fixture through multiple test files. Do not change file name.
@pytest.fixture
def init_tiny_snb(tmp_path):
if os.path.exists(tmp_path):
shutil.rmtree(tmp_path)
output_path = str(tmp_path)
db = kuzu.Database(output_path)
conn = kuzu.Connection(db)
conn.execute("CREATE NODE TABLE person (ID INT64, fName STRING, gender INT64, isStudent BOOLEAN, isWorker BOOLEAN, "
"age INT64, eyeSight DOUBLE, birthdate DATE, registerTime TIMESTAMP, lastJobDuration "
"INTERVAL, workedHours INT64[], usedNames STRING[], courseScoresPerTerm INT64[][], grades INT64[4], "
"height float, PRIMARY KEY (ID))")
conn.execute("COPY person FROM \"../../../dataset/tinysnb/vPerson.csv\" (HEADER=true)")

def init_npy(conn):
conn.execute(
'create node table npyoned (i64 INT64,i32 INT32,i16 INT16,f64 DOUBLE,f32 FLOAT, PRIMARY KEY(i64));'
)
conn.execute(
'copy npyoned from ("../../../dataset/npy-1d/one_dim_int64.npy", "../../../dataset/npy-1d/one_dim_int32.npy", '
' "../../../dataset/npy-1d/one_dim_int16.npy", "../../../dataset/npy-1d/one_dim_double.npy", '
'"../../../dataset/npy-1d/one_dim_float.npy") by column;'
)
conn.execute(
"create rel table knows (FROM person TO person, date DATE, meetTime TIMESTAMP, validInterval INTERVAL, "
"comments STRING[], MANY_MANY);")
conn.execute("COPY knows FROM \"../../../dataset/tinysnb/eKnows.csv\"")
conn.execute("create node table organisation (ID INT64, name STRING, orgCode INT64, mark DOUBLE, score INT64, "
"history STRING,licenseValidInterval INTERVAL, rating DOUBLE, state STRUCT(revenue INT16, location "
"STRING[], stock STRUCT(price INT64[], volume INT64)), PRIMARY KEY (ID));")
conn.execute('COPY organisation FROM "../../../dataset/tinysnb/vOrganisation.csv"')
conn.execute('CREATE NODE TABLE movies (name STRING, length INT32, note STRING, description STRUCT(rating DOUBLE, '
'views INT64, release TIMESTAMP, film DATE), content BYTEA, PRIMARY KEY (name))')
conn.execute('COPY movies FROM "../../../dataset/tinysnb/vMovies.csv"')
conn.execute('create rel table workAt (FROM person TO organisation, year INT64, grading DOUBLE[2], rating float,'
' MANY_ONE)')
conn.execute('COPY workAt FROM "../../../dataset/tinysnb/eWorkAt.csv"')
'create node table npytwod (id INT64, i64 INT64[3], i32 INT32[3], i16 INT16[3], f64 DOUBLE[3], f32 FLOAT[3],'
'PRIMARY KEY(id));'
)
conn.execute(
'copy npytwod from ("../../../dataset/npy-2d/id_int64.npy", "../../../dataset/npy-2d/two_dim_int64.npy", '
'"../../../dataset/npy-2d/two_dim_int32.npy", "../../../dataset/npy-2d/two_dim_int16.npy", '
' "../../../dataset/npy-2d/two_dim_double.npy", "../../../dataset/npy-2d/two_dim_float.npy") by column;'
)


def init_tensor(conn):
conn.execute('create node table tensor (ID INT64, boolTensor BOOLEAN[], doubleTensor DOUBLE[][], '
'intTensor INT64[][][], oneDimInt INT64, PRIMARY KEY (ID));')
conn.execute(
'COPY tensor FROM "../../../dataset/tensor-list/vTensor.csv" (HEADER=true)')


def init_long_str(conn):
conn.execute(
"CREATE NODE TABLE personLongString (name STRING, spouse STRING, PRIMARY KEY(name))")
conn.execute(
@@ -45,29 +43,56 @@ def init_tiny_snb(tmp_path):
"CREATE REL TABLE knowsLongString (FROM personLongString TO personLongString, MANY_MANY)")
conn.execute(
'COPY knowsLongString FROM "../../../dataset/long-string-pk-tests/eKnows.csv"')
conn.execute(
'create node table npyoned (i64 INT64,i32 INT32,i16 INT16,f64 DOUBLE,f32 FLOAT, PRIMARY KEY(i64));'


def init_tinysnb(conn):
tiny_snb_path = os.path.abspath(
os.path.join(
os.path.dirname(os.path.abspath(__file__)),
"../../../dataset/tinysnb")
)
schema_path = os.path.join(tiny_snb_path, "schema.cypher")
with open(schema_path, "r") as f:
for line in f.readlines():
line = line.strip()
if line:
conn.execute(line)
copy_path = os.path.join(tiny_snb_path, "copy.cypher")
with open(copy_path, "r") as f:
for line in f.readlines():
line = line.strip()
line = line.replace("dataset/tinysnb", tiny_snb_path)
if line:
conn.execute(line)


def init_movie_serial(conn):
conn.execute(
'copy npyoned from ("../../../dataset/npy-1d/one_dim_int64.npy", "../../../dataset/npy-1d/one_dim_int32.npy", '
' "../../../dataset/npy-1d/one_dim_int16.npy", "../../../dataset/npy-1d/one_dim_double.npy", '
'"../../../dataset/npy-1d/one_dim_float.npy") by column;'
"create node table moviesSerial (ID SERIAL, name STRING, length INT32, note STRING, PRIMARY KEY (ID));"
)
conn.execute(
'create node table npytwod (id INT64, i64 INT64[3], i32 INT32[3], i16 INT16[3], f64 DOUBLE[3], f32 FLOAT[3],'
'PRIMARY KEY(id));'
)
conn.execute(
'copy npytwod from ("../../../dataset/npy-2d/id_int64.npy", "../../../dataset/npy-2d/two_dim_int64.npy", '
'"../../../dataset/npy-2d/two_dim_int32.npy", "../../../dataset/npy-2d/two_dim_int16.npy", '
' "../../../dataset/npy-2d/two_dim_double.npy", "../../../dataset/npy-2d/two_dim_float.npy") by column;'
'copy moviesSerial from "../../../dataset/tinysnb-serial/vMovies.csv"'
)


@pytest.fixture
def init_db(tmp_path):
if os.path.exists(tmp_path):
shutil.rmtree(tmp_path)
output_path = str(tmp_path)
db = kuzu.Database(output_path)
conn = kuzu.Connection(db)
init_tinysnb(conn)
init_npy(conn)
init_tensor(conn)
init_long_str(conn)
init_movie_serial(conn)
return output_path


@pytest.fixture
def establish_connection(init_tiny_snb):
db = kuzu.Database(init_tiny_snb, buffer_pool_size=256 * 1024 * 1024)
def establish_connection(init_db):
db = kuzu.Database(init_db, buffer_pool_size=256 * 1024 * 1024)
conn = kuzu.Connection(db, num_threads=4)
return conn, db

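Tests then consume the reworked fixture chain (`init_db` feeding `establish_connection`) instead of the old `init_tiny_snb` fixture. A hypothetical test using it might look like the following; the test name and assertion are illustrative and not part of this PR, and `get_next` returning each tuple as a list of Python values is assumed from the existing API:

```python
# Hypothetical example, not part of this PR: shows a test consuming the reworked fixture.
def test_serial_id(establish_connection):
    conn, db = establish_connection
    result = conn.execute("MATCH (a:moviesSerial) WHERE a.ID = 2 RETURN a.ID;")
    assert result.get_next() == [2]  # SERIAL comes back as a plain Python int
```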