HDDS-30. Fix TestContainerSQLCli. Contributed by Shashikant Banerjee.

Anu Engineer 2018-05-10 11:28:35 -07:00
parent 6341c3a437
commit 7482963f1a
2 changed files with 13 additions and 83 deletions

TestContainerSQLCli.java

@@ -176,29 +176,6 @@ public void shutdown() throws InterruptedException {
}
}
@Test
public void testConvertBlockDB() throws Exception {
String dbOutPath = GenericTestUtils.getTempPath(
UUID.randomUUID() + "/out_sql.db");
String dbRootPath = conf.get(OzoneConfigKeys.OZONE_METADATA_DIRS);
String dbPath = dbRootPath + "/" + BLOCK_DB;
String[] args = {"-p", dbPath, "-o", dbOutPath};
cli.run(args);
Connection conn = connectDB(dbOutPath);
String sql = "SELECT * FROM blockContainer";
ResultSet rs = executeQuery(conn, sql);
while(rs.next()) {
String blockKey = rs.getString("blockKey");
String containerName = rs.getString("containerName");
assertTrue(blockContainerMap.containsKey(blockKey) &&
blockContainerMap.remove(blockKey).equals(containerName));
}
assertEquals(0, blockContainerMap.size());
Files.delete(Paths.get(dbOutPath));
}
@Test
public void testConvertNodepoolDB() throws Exception {
String dbOutPath = GenericTestUtils.getTempPath(

SQLCLI.java

@@ -18,6 +18,8 @@
package org.apache.hadoop.ozone.scm.cli;
import com.google.common.base.Preconditions;
import com.google.common.primitives.Longs;
import com.google.protobuf.ByteString;
import org.apache.commons.cli.BasicParser;
import org.apache.commons.cli.CommandLine;
import org.apache.commons.cli.Option;
@@ -86,7 +88,7 @@ public class SQLCLI extends Configured implements Tool {
// for container.db
private static final String CREATE_CONTAINER_INFO =
"CREATE TABLE containerInfo (" +
"containerName TEXT PRIMARY KEY NOT NULL, " +
"containerID LONG PRIMARY KEY NOT NULL, " +
"leaderUUID TEXT NOT NULL)";
private static final String CREATE_CONTAINER_MEMBERS =
"CREATE TABLE containerMembers (" +
@@ -100,8 +102,8 @@ public class SQLCLI extends Configured implements Tool {
"ipAddress TEXT, " +
"containerPort INTEGER NOT NULL);";
private static final String INSERT_CONTAINER_INFO =
"INSERT INTO containerInfo (containerName, leaderUUID) " +
"VALUES (\"%s\", \"%s\")";
"INSERT INTO containerInfo (containerID, leaderUUID) " +
"VALUES (\"%d\", \"%s\")";
private static final String INSERT_DATANODE_INFO =
"INSERT INTO datanodeInfo (hostname, datanodeUUid, ipAddress, " +
"containerPort) " +
@@ -109,14 +111,6 @@
private static final String INSERT_CONTAINER_MEMBERS =
"INSERT INTO containerMembers (containerName, datanodeUUID) " +
"VALUES (\"%s\", \"%s\")";
// for block.db
private static final String CREATE_BLOCK_CONTAINER =
"CREATE TABLE blockContainer (" +
"blockKey TEXT PRIMARY KEY NOT NULL, " +
"containerName TEXT NOT NULL)";
private static final String INSERT_BLOCK_CONTAINER =
"INSERT INTO blockContainer (blockKey, containerName) " +
"VALUES (\"%s\", \"%s\")";
// for nodepool.db
private static final String CREATE_NODE_POOL =
"CREATE TABLE nodePool (" +
@@ -291,10 +285,7 @@ public int run(String[] args) throws Exception {
if (dbName.toString().endsWith(CONTAINER_DB_SUFFIX)) {
LOG.info("Converting container DB");
convertContainerDB(dbPath, outPath);
} else if (dbName.toString().equals(BLOCK_DB)) {
LOG.info("Converting block DB");
convertBlockDB(dbPath, outPath);
} else if (dbName.toString().equals(NODEPOOL_DB)) {
} else if (dbName.toString().equals(NODEPOOL_DB)) {
LOG.info("Converting node pool DB");
convertNodePoolDB(dbPath, outPath);
} else if (dbName.toString().equals(OPEN_CONTAINERS_DB)) {
@@ -498,14 +489,14 @@ private void convertContainerDB(Path dbPath, Path outPath)
HashSet<String> uuidChecked = new HashSet<>();
dbStore.iterate(null, (key, value) -> {
String containerName = new String(key, encoding);
long containerID = Longs.fromByteArray(key);
ContainerInfo containerInfo = null;
containerInfo = ContainerInfo.fromProtobuf(
HddsProtos.SCMContainerInfo.PARSER.parseFrom(value));
Preconditions.checkNotNull(containerInfo);
try {
//TODO: include container state to sqllite schema
insertContainerDB(conn, containerName,
insertContainerDB(conn, containerID,
containerInfo.getPipeline().getProtobufMessage(), uuidChecked);
return true;
} catch (SQLException e) {
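For reference, a minimal sketch (not part of this commit) of the key handling the hunk above relies on: container DB keys are assumed to be the containerID serialized as an 8-byte big-endian long, which is the layout Guava's Longs.fromByteArray(key) expects.

import com.google.common.primitives.Longs;

// Illustrative round trip for the assumed key layout.
public class ContainerKeySketch {
  public static void main(String[] args) {
    long containerID = 42L;
    byte[] key = Longs.toByteArray(containerID);   // 8-byte big-endian encoding
    long decoded = Longs.fromByteArray(key);       // what convertContainerDB now does
    System.out.println(decoded);                   // prints 42
  }
}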
@@ -518,16 +509,16 @@
/**
* Insert into the sqlite DB of container.db.
* @param conn the connection to the sqlite DB.
* @param containerName the name of the container.
* @param containerID the id of the container.
* @param pipeline the actual container pipeline object.
* @param uuidChecked the uuid that has been already inserted.
* @throws SQLException throws exception.
*/
private void insertContainerDB(Connection conn, String containerName,
private void insertContainerDB(Connection conn, long containerID,
Pipeline pipeline, Set<String> uuidChecked) throws SQLException {
LOG.info("Insert to sql container db, for container {}", containerName);
LOG.info("Insert to sql container db, for container {}", containerID);
String insertContainerInfo = String.format(
INSERT_CONTAINER_INFO, containerName,
INSERT_CONTAINER_INFO, containerID,
pipeline.getPipelineChannel().getLeaderID());
executeSQL(conn, insertContainerInfo);
@@ -546,49 +537,11 @@ private void insertContainerDB(Connection conn, String containerName,
uuidChecked.add(uuid);
}
String insertContainerMembers = String.format(
INSERT_CONTAINER_MEMBERS, containerName, uuid);
INSERT_CONTAINER_MEMBERS, containerID, uuid);
executeSQL(conn, insertContainerMembers);
}
LOG.info("Insertion completed.");
}
/**
* Converts block.db to sqlite. This is rather simple db, the schema has only
* one table:
*
* blockContainer
* --------------------------
* blockKey* | containerName
* --------------------------
*
* @param dbPath path to container db.
* @param outPath path to output sqlite
* @throws IOException throws exception.
*/
private void convertBlockDB(Path dbPath, Path outPath) throws Exception {
LOG.info("Create tables for sql block db.");
File dbFile = dbPath.toFile();
try (MetadataStore dbStore = MetadataStoreBuilder.newBuilder()
.setConf(conf).setDbFile(dbFile).build();
Connection conn = connectDB(outPath.toString())) {
executeSQL(conn, CREATE_BLOCK_CONTAINER);
dbStore.iterate(null, (key, value) -> {
String blockKey = DFSUtilClient.bytes2String(key);
String containerName = DFSUtilClient.bytes2String(value);
String insertBlockContainer = String.format(
INSERT_BLOCK_CONTAINER, blockKey, containerName);
try {
executeSQL(conn, insertBlockContainer);
return true;
} catch (SQLException e) {
throw new IOException(e);
}
});
}
}
/**
* Converts nodePool.db to sqlite. The schema of sql db:
* two tables, nodePool and datanodeInfo (the same datanode Info as for