HDDS-30. Fix TestContainerSQLCli. Contributed by Shashikant Banerjee.
commit 7482963f1a (parent 6341c3a437)
TestContainerSQLCli.java
@@ -176,29 +176,6 @@ public void shutdown() throws InterruptedException {
     }
   }
 
-  @Test
-  public void testConvertBlockDB() throws Exception {
-    String dbOutPath = GenericTestUtils.getTempPath(
-        UUID.randomUUID() + "/out_sql.db");
-    String dbRootPath = conf.get(OzoneConfigKeys.OZONE_METADATA_DIRS);
-    String dbPath = dbRootPath + "/" + BLOCK_DB;
-    String[] args = {"-p", dbPath, "-o", dbOutPath};
-
-    cli.run(args);
-
-    Connection conn = connectDB(dbOutPath);
-    String sql = "SELECT * FROM blockContainer";
-    ResultSet rs = executeQuery(conn, sql);
-    while(rs.next()) {
-      String blockKey = rs.getString("blockKey");
-      String containerName = rs.getString("containerName");
-      assertTrue(blockContainerMap.containsKey(blockKey) &&
-          blockContainerMap.remove(blockKey).equals(containerName));
-    }
-    assertEquals(0, blockContainerMap.size());
-    Files.delete(Paths.get(dbOutPath));
-  }
-
   @Test
   public void testConvertNodepoolDB() throws Exception {
     String dbOutPath = GenericTestUtils.getTempPath(
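The removed testConvertBlockDB exercised the block.db conversion path that this commit also deletes from SQLCLI (see the hunks below). For orientation only, here is a minimal, hypothetical JDBC sketch of how the remaining converted output could be inspected; the ContainerDbCheck class, the DriverManager/jdbc:sqlite usage (Xerial sqlite-jdbc driver assumed on the classpath), and the sample main() wrapper are assumptions, while the containerInfo table and its containerID/leaderUUID columns come from the new CREATE_CONTAINER_INFO schema in this commit.

import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.ResultSet;
import java.sql.Statement;

// Hypothetical sketch, not part of this commit: read back the sqlite file
// that SQLCLI writes when converting container.db.
public final class ContainerDbCheck {
  public static void main(String[] args) throws Exception {
    String dbOutPath = args[0];  // path to the sqlite file produced by SQLCLI
    try (Connection conn =
             DriverManager.getConnection("jdbc:sqlite:" + dbOutPath);
         Statement stmt = conn.createStatement();
         ResultSet rs = stmt.executeQuery(
             "SELECT containerID, leaderUUID FROM containerInfo")) {
      while (rs.next()) {
        // containerID is now a numeric key rather than a containerName string.
        long containerID = rs.getLong("containerID");
        String leaderUUID = rs.getString("leaderUUID");
        System.out.println(containerID + " -> " + leaderUUID);
      }
    }
  }
}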
SQLCLI.java (org.apache.hadoop.ozone.scm.cli)
@@ -18,6 +18,8 @@
 package org.apache.hadoop.ozone.scm.cli;
 
 import com.google.common.base.Preconditions;
+import com.google.common.primitives.Longs;
+import com.google.protobuf.ByteString;
 import org.apache.commons.cli.BasicParser;
 import org.apache.commons.cli.CommandLine;
 import org.apache.commons.cli.Option;
@@ -86,7 +88,7 @@ public class SQLCLI extends Configured implements Tool {
   // for container.db
   private static final String CREATE_CONTAINER_INFO =
       "CREATE TABLE containerInfo (" +
-          "containerName TEXT PRIMARY KEY NOT NULL, " +
+          "containerID LONG PRIMARY KEY NOT NULL, " +
           "leaderUUID TEXT NOT NULL)";
   private static final String CREATE_CONTAINER_MEMBERS =
       "CREATE TABLE containerMembers (" +
@@ -100,8 +102,8 @@ public class SQLCLI extends Configured implements Tool {
           "ipAddress TEXT, " +
           "containerPort INTEGER NOT NULL);";
   private static final String INSERT_CONTAINER_INFO =
-      "INSERT INTO containerInfo (containerName, leaderUUID) " +
-          "VALUES (\"%s\", \"%s\")";
+      "INSERT INTO containerInfo (containerID, leaderUUID) " +
+          "VALUES (\"%d\", \"%s\")";
   private static final String INSERT_DATANODE_INFO =
       "INSERT INTO datanodeInfo (hostname, datanodeUUid, ipAddress, " +
           "containerPort) " +
@@ -109,14 +111,6 @@ public class SQLCLI extends Configured implements Tool {
   private static final String INSERT_CONTAINER_MEMBERS =
       "INSERT INTO containerMembers (containerName, datanodeUUID) " +
           "VALUES (\"%s\", \"%s\")";
-  // for block.db
-  private static final String CREATE_BLOCK_CONTAINER =
-      "CREATE TABLE blockContainer (" +
-          "blockKey TEXT PRIMARY KEY NOT NULL, " +
-          "containerName TEXT NOT NULL)";
-  private static final String INSERT_BLOCK_CONTAINER =
-      "INSERT INTO blockContainer (blockKey, containerName) " +
-          "VALUES (\"%s\", \"%s\")";
   // for nodepool.db
   private static final String CREATE_NODE_POOL =
       "CREATE TABLE nodePool (" +
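Because containerID is a long, INSERT_CONTAINER_INFO now formats the key with %d, while INSERT_CONTAINER_MEMBERS keeps %s (String.format boxes the primitive, so a long still renders as its decimal text). A standalone sketch of the SQL text these templates produce; the FormatSketch class and the sample ID/UUID values are made up for illustration, the format strings are the ones added above.

// Illustrative only: prints the SQL statements the new templates yield.
public final class FormatSketch {
  private static final String INSERT_CONTAINER_INFO =
      "INSERT INTO containerInfo (containerID, leaderUUID) " +
          "VALUES (\"%d\", \"%s\")";
  private static final String INSERT_CONTAINER_MEMBERS =
      "INSERT INTO containerMembers (containerName, datanodeUUID) " +
          "VALUES (\"%s\", \"%s\")";

  public static void main(String[] args) {
    long containerID = 42L;                                        // sample value
    String leaderUUID = "c0ffee00-0000-4000-8000-000000000042";    // sample value
    String datanodeUUID = "deadbeef-0000-4000-8000-000000000007";  // sample value

    // INSERT INTO containerInfo (containerID, leaderUUID) VALUES ("42", "c0ffee00-...")
    System.out.println(
        String.format(INSERT_CONTAINER_INFO, containerID, leaderUUID));

    // %s accepts the long as well, so the members row keys on the same ID.
    // INSERT INTO containerMembers (containerName, datanodeUUID) VALUES ("42", "deadbeef-...")
    System.out.println(
        String.format(INSERT_CONTAINER_MEMBERS, containerID, datanodeUUID));
  }
}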
@@ -291,9 +285,6 @@ public int run(String[] args) throws Exception {
     if (dbName.toString().endsWith(CONTAINER_DB_SUFFIX)) {
       LOG.info("Converting container DB");
       convertContainerDB(dbPath, outPath);
-    } else if (dbName.toString().equals(BLOCK_DB)) {
-      LOG.info("Converting block DB");
-      convertBlockDB(dbPath, outPath);
     } else if (dbName.toString().equals(NODEPOOL_DB)) {
       LOG.info("Converting node pool DB");
       convertNodePoolDB(dbPath, outPath);
@@ -498,14 +489,14 @@ private void convertContainerDB(Path dbPath, Path outPath)
 
     HashSet<String> uuidChecked = new HashSet<>();
     dbStore.iterate(null, (key, value) -> {
-      String containerName = new String(key, encoding);
+      long containerID = Longs.fromByteArray(key);
      ContainerInfo containerInfo = null;
      containerInfo = ContainerInfo.fromProtobuf(
          HddsProtos.SCMContainerInfo.PARSER.parseFrom(value));
      Preconditions.checkNotNull(containerInfo);
      try {
        //TODO: include container state to sqllite schema
-        insertContainerDB(conn, containerName,
+        insertContainerDB(conn, containerID,
            containerInfo.getPipeline().getProtobufMessage(), uuidChecked);
        return true;
      } catch (SQLException e) {
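The container.db keys are now decoded as 8-byte big-endian container IDs with Guava's Longs (hence the new com.google.common.primitives.Longs import) instead of being read as strings. A minimal sketch of that round-trip, assuming only the Guava Longs API used above; the KeyCodecSketch class and the sample ID are made up.

import com.google.common.primitives.Longs;

// Sketch of the key encoding assumed by convertContainerDB's iterate callback.
public final class KeyCodecSketch {
  public static void main(String[] args) {
    long containerID = 42L;                       // sample value
    byte[] key = Longs.toByteArray(containerID);  // 8-byte big-endian encoding
    long decoded = Longs.fromByteArray(key);      // the decoding step in the diff
    System.out.println(decoded == containerID);   // true
  }
}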
@@ -518,16 +509,16 @@ private void convertContainerDB(Path dbPath, Path outPath)
   /**
    * Insert into the sqlite DB of container.db.
    * @param conn the connection to the sqlite DB.
-   * @param containerName the name of the container.
+   * @param containerID the id of the container.
    * @param pipeline the actual container pipeline object.
    * @param uuidChecked the uuid that has been already inserted.
    * @throws SQLException throws exception.
    */
-  private void insertContainerDB(Connection conn, String containerName,
+  private void insertContainerDB(Connection conn, long containerID,
       Pipeline pipeline, Set<String> uuidChecked) throws SQLException {
-    LOG.info("Insert to sql container db, for container {}", containerName);
+    LOG.info("Insert to sql container db, for container {}", containerID);
     String insertContainerInfo = String.format(
-        INSERT_CONTAINER_INFO, containerName,
+        INSERT_CONTAINER_INFO, containerID,
         pipeline.getPipelineChannel().getLeaderID());
     executeSQL(conn, insertContainerInfo);
 
@@ -546,49 +537,11 @@ private void insertContainerDB(Connection conn, String containerName,
         uuidChecked.add(uuid);
       }
       String insertContainerMembers = String.format(
-          INSERT_CONTAINER_MEMBERS, containerName, uuid);
+          INSERT_CONTAINER_MEMBERS, containerID, uuid);
       executeSQL(conn, insertContainerMembers);
     }
     LOG.info("Insertion completed.");
   }
 
-  /**
-   * Converts block.db to sqlite. This is rather simple db, the schema has only
-   * one table:
-   *
-   * blockContainer
-   * --------------------------
-   * blockKey* | containerName
-   * --------------------------
-   *
-   * @param dbPath path to container db.
-   * @param outPath path to output sqlite
-   * @throws IOException throws exception.
-   */
-  private void convertBlockDB(Path dbPath, Path outPath) throws Exception {
-    LOG.info("Create tables for sql block db.");
-    File dbFile = dbPath.toFile();
-    try (MetadataStore dbStore = MetadataStoreBuilder.newBuilder()
-        .setConf(conf).setDbFile(dbFile).build();
-        Connection conn = connectDB(outPath.toString())) {
-      executeSQL(conn, CREATE_BLOCK_CONTAINER);
-
-      dbStore.iterate(null, (key, value) -> {
-        String blockKey = DFSUtilClient.bytes2String(key);
-        String containerName = DFSUtilClient.bytes2String(value);
-        String insertBlockContainer = String.format(
-            INSERT_BLOCK_CONTAINER, blockKey, containerName);
-
-        try {
-          executeSQL(conn, insertBlockContainer);
-          return true;
-        } catch (SQLException e) {
-          throw new IOException(e);
-        }
-      });
-    }
-  }
-
   /**
    * Converts nodePool.db to sqlite. The schema of sql db:
    * two tables, nodePool and datanodeInfo (the same datanode Info as for