Commit 210671d

Optimized delete table in hive using direct-sql. (Netflix#243)
1 parent b9d99f3 · commit 210671d

File tree: 8 files changed (+209 -17 lines)
metacat-connector-hive/src/main/java/com/netflix/metacat/connector/hive/configs/HiveConnectorFastServiceConfig.java (+5 -2)

@@ -158,18 +158,21 @@ public SequenceGeneration sequenceGeneration(
      * @param connectorContext connector config
      * @param hiveJdbcTemplate hive JDBC template
      * @param serviceMetric fast service metric
+     * @param directSqlSavePartition partition service involving direct sqls
      * @return HiveConnectorPartitionService
      */
     @Bean
     public DirectSqlTable directSqlTable(
         final ConnectorContext connectorContext,
         @Qualifier("hiveJdbcTemplate") final JdbcTemplate hiveJdbcTemplate,
-        final HiveConnectorFastServiceMetric serviceMetric
+        final HiveConnectorFastServiceMetric serviceMetric,
+        final DirectSqlSavePartition directSqlSavePartition
     ) {
         return new DirectSqlTable(
             connectorContext,
             hiveJdbcTemplate,
-            serviceMetric
+            serviceMetric,
+            directSqlSavePartition
         );
     }

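Editor's note: this hunk only threads the new DirectSqlSavePartition dependency through Spring so DirectSqlTable can delegate partition cleanup during a table drop (the `@return HiveConnectorPartitionService` javadoc above is a pre-existing copy-paste leftover; the method returns DirectSqlTable). For readers who want the wiring without Spring, a minimal hand-constructed sketch follows; the ConnectorContext and metric construction mirror the test setup in DirectSqlTableSpec further down, while `config`, `registry`, `hiveDataSource`, and the DirectSqlSavePartition instance are assumed to be built elsewhere.

    // A minimal sketch of the equivalent manual wiring, no Spring context.
    // `config`, `registry`, `hiveDataSource` and `directSqlSavePartition`
    // are assumptions here, built elsewhere; names are illustrative only.
    final ConnectorContext context =
        new ConnectorContext("test", "test", "hive", config, registry, Maps.newHashMap());
    final HiveConnectorFastServiceMetric metric = new HiveConnectorFastServiceMetric(registry);
    final JdbcTemplate hiveJdbcTemplate = new JdbcTemplate(hiveDataSource);
    final DirectSqlTable directSqlTable =
        new DirectSqlTable(context, hiveJdbcTemplate, metric, directSqlSavePartition);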
metacat-connector-hive/src/main/java/com/netflix/metacat/connector/hive/sql/DirectSqlSavePartition.java (+32)

@@ -322,6 +322,34 @@ private List<PartitionSequenceIds> getPartitionSequenceIds(final QualifiedName t
                 rs.getLong("serde_id")));
     }
 
+    /**
+     * Delete all the partitions for the given table <code>tableQName</code>.
+     *
+     * @param tableQName table name
+     */
+    public void delete(final QualifiedName tableQName) {
+        final long start = registry.clock().wallTime();
+        try {
+            List<PartitionSequenceIds> partitionSequenceIds = getPartitionSequenceIds(tableQName);
+            while (!partitionSequenceIds.isEmpty()) {
+                _delete(partitionSequenceIds);
+                partitionSequenceIds = getPartitionSequenceIds(tableQName);
+            }
+        } finally {
+            this.fastServiceMetric.recordTimer(
+                HiveMetrics.TagDropHivePartitions.getMetricName(), registry.clock().wallTime() - start);
+        }
+    }
+
+    private List<PartitionSequenceIds> getPartitionSequenceIds(final QualifiedName tableQName) {
+        return jdbcTemplate.query(
+            String.format(SQL.PARTITIONS_SELECT_ALL, this.batchSize),
+            new Object[]{tableQName.getDatabaseName(), tableQName.getTableName()},
+            new int[]{Types.VARCHAR, Types.VARCHAR},
+            (rs, rowNum) -> new PartitionSequenceIds(rs.getLong("part_id"), rs.getLong("sd_id"),
+                rs.getLong("serde_id")));
+    }
+
     @SuppressWarnings("checkstyle:methodname")
     private void _delete(final List<PartitionSequenceIds> subPartitionIds) {
         final List<String> paramVariables = subPartitionIds.stream().map(s -> "?").collect(Collectors.toList());
@@ -418,6 +446,10 @@ private static class SQL {
             "INSERT INTO PARTITION_KEY_VALS(PART_ID,PART_KEY_VAL,INTEGER_IDX) VALUES (?,?,?)";
         static final String PARTITION_KEY_VALS_DELETES =
             "DELETE FROM PARTITION_KEY_VALS WHERE PART_ID in (%s)";
+        static final String PARTITIONS_SELECT_ALL =
+            "SELECT P.PART_ID, P.SD_ID, S.SERDE_ID FROM DBS D JOIN TBLS T ON D.DB_ID=T.DB_ID "
+                + "JOIN PARTITIONS P ON T.TBL_ID=P.TBL_ID JOIN SDS S ON P.SD_ID=S.SD_ID "
+                + "WHERE D.NAME=? and T.TBL_NAME=? limit %d";
         static final String PARTITIONS_SELECT =
             "SELECT P.PART_ID, P.SD_ID, S.SERDE_ID FROM DBS D JOIN TBLS T ON D.DB_ID=T.DB_ID "
                 + "JOIN PARTITIONS P ON T.TBL_ID=P.TBL_ID JOIN SDS S ON P.SD_ID=S.SD_ID "

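Editor's note: the new `delete(tableQName)` is the heart of the optimization. Rather than loading every partition id of the table at once, it repeatedly fetches at most `batchSize` ids (note the `limit %d` in PARTITIONS_SELECT_ALL) and hands each batch to the existing `_delete`, looping until the select comes back empty; the timer in the `finally` block records the whole drop under TagDropHivePartitions. The same pattern, reduced to plain JDBC so it can be read in isolation, is sketched below. It is illustration only: it clears just the PARTITIONS table, whereas the real `_delete` also clears child rows (PARTITION_KEY_VALS and friends) first and goes through Spring's JdbcTemplate.

    import java.sql.Connection;
    import java.sql.PreparedStatement;
    import java.sql.ResultSet;
    import java.sql.SQLException;
    import java.util.ArrayList;
    import java.util.Collections;
    import java.util.List;

    // Sketch of the select-batch-then-delete loop against the Hive metastore DB.
    final class PartitionBatchDeleteSketch {
        static void deleteAllPartitions(final Connection conn, final String db,
                                        final String table, final int batchSize) throws SQLException {
            final String selectIds =
                "SELECT P.PART_ID FROM DBS D JOIN TBLS T ON D.DB_ID=T.DB_ID "
                    + "JOIN PARTITIONS P ON T.TBL_ID=P.TBL_ID "
                    + "WHERE D.NAME=? AND T.TBL_NAME=? LIMIT " + batchSize;
            while (true) {
                // 1. Fetch at most batchSize partition ids for the table.
                final List<Long> ids = new ArrayList<>();
                try (PreparedStatement ps = conn.prepareStatement(selectIds)) {
                    ps.setString(1, db);
                    ps.setString(2, table);
                    try (ResultSet rs = ps.executeQuery()) {
                        while (rs.next()) {
                            ids.add(rs.getLong(1));
                        }
                    }
                }
                if (ids.isEmpty()) {
                    return; // terminates: every pass deletes the batch it just selected
                }
                // 2. Delete exactly that batch, then loop back to re-select.
                final String in = String.join(",", Collections.nCopies(ids.size(), "?"));
                try (PreparedStatement ps = conn.prepareStatement(
                        "DELETE FROM PARTITIONS WHERE PART_ID IN (" + in + ")")) {
                    for (int i = 0; i < ids.size(); i++) {
                        ps.setLong(i + 1, ids.get(i));
                    }
                    ps.executeUpdate();
                }
            }
        }
    }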
metacat-connector-hive/src/main/java/com/netflix/metacat/connector/hive/sql/DirectSqlTable.java (+79 -4)

@@ -22,12 +22,14 @@
 import com.google.common.collect.Maps;
 import com.netflix.metacat.common.QualifiedName;
 import com.netflix.metacat.common.server.connectors.ConnectorContext;
+import com.netflix.metacat.common.server.connectors.exception.ConnectorException;
 import com.netflix.metacat.common.server.connectors.exception.InvalidMetaException;
 import com.netflix.metacat.common.server.connectors.exception.TableNotFoundException;
 import com.netflix.metacat.connector.hive.monitoring.HiveMetrics;
 import com.netflix.metacat.connector.hive.util.HiveConnectorFastServiceMetric;
 import com.netflix.spectator.api.Registry;
 import lombok.extern.slf4j.Slf4j;
+import org.springframework.dao.DataAccessException;
 import org.springframework.dao.EmptyResultDataAccessException;
 import org.springframework.jdbc.core.JdbcTemplate;
 import org.springframework.jdbc.core.ResultSetExtractor;
@@ -60,23 +62,27 @@ public class DirectSqlTable {
     private final JdbcTemplate jdbcTemplate;
     private final HiveConnectorFastServiceMetric fastServiceMetric;
     private final String catalogName;
+    private final DirectSqlSavePartition directSqlSavePartition;
 
     /**
      * Constructor.
      *
-     * @param connectorContext  server context
-     * @param jdbcTemplate      JDBC template
-     * @param fastServiceMetric fast service metric
+     * @param connectorContext       server context
+     * @param jdbcTemplate           JDBC template
+     * @param fastServiceMetric      fast service metric
+     * @param directSqlSavePartition direct sql partition service
      */
     public DirectSqlTable(
         final ConnectorContext connectorContext,
         final JdbcTemplate jdbcTemplate,
-        final HiveConnectorFastServiceMetric fastServiceMetric
+        final HiveConnectorFastServiceMetric fastServiceMetric,
+        final DirectSqlSavePartition directSqlSavePartition
     ) {
         this.catalogName = connectorContext.getCatalogName();
         this.registry = connectorContext.getRegistry();
         this.jdbcTemplate = jdbcTemplate;
         this.fastServiceMetric = fastServiceMetric;
+        this.directSqlSavePartition = directSqlSavePartition;
     }
 
     /**
@@ -222,6 +228,54 @@ public Long getTableId(final QualifiedName tableName) {
         }
     }
 
+    /**
+     * Deletes all the table related information from the store.
+     * @param tableName table name
+     */
+    public void delete(final QualifiedName tableName) {
+        try {
+            final TableSequenceIds ids = getSequenceIds(tableName);
+            directSqlSavePartition.delete(tableName);
+            jdbcTemplate.update(SQL.UPDATE_SDS_CD, new SqlParameterValue(Types.BIGINT, null),
+                new SqlParameterValue(Types.BIGINT, ids.getSdsId()));
+            jdbcTemplate.update(SQL.UPDATE_SDS_SERDE, new SqlParameterValue(Types.BIGINT, null),
+                new SqlParameterValue(Types.BIGINT, ids.getSdsId()));
+            jdbcTemplate.update(SQL.DELETE_COLUMNS_V2, new SqlParameterValue(Types.BIGINT, ids.getCdId()));
+            jdbcTemplate.update(SQL.DELETE_CDS, new SqlParameterValue(Types.BIGINT, ids.getCdId()));
+            jdbcTemplate.update(SQL.DELETE_PARTITION_KEYS, new SqlParameterValue(Types.BIGINT, ids.getTableId()));
+            jdbcTemplate.update(SQL.DELETE_TABLE_PARAMS, new SqlParameterValue(Types.BIGINT, ids.getTableId()));
+            jdbcTemplate.update(SQL.DELETE_TAB_COL_STATS, new SqlParameterValue(Types.BIGINT, ids.getTableId()));
+            jdbcTemplate.update(SQL.UPDATE_TABLE_SD, new SqlParameterValue(Types.BIGINT, null),
+                new SqlParameterValue(Types.BIGINT, ids.getTableId()));
+            jdbcTemplate.update(SQL.DELETE_SKEWED_COL_NAMES, new SqlParameterValue(Types.BIGINT, ids.getSdsId()));
+            jdbcTemplate.update(SQL.DELETE_BUCKETING_COLS, new SqlParameterValue(Types.BIGINT, ids.getSdsId()));
+            jdbcTemplate.update(SQL.DELETE_SORT_COLS, new SqlParameterValue(Types.BIGINT, ids.getSdsId()));
+            jdbcTemplate.update(SQL.DELETE_SD_PARAMS, new SqlParameterValue(Types.BIGINT, ids.getSdsId()));
+            jdbcTemplate.update(SQL.DELETE_SKEWED_COL_VALUE_LOC_MAP,
+                new SqlParameterValue(Types.BIGINT, ids.getSdsId()));
+            jdbcTemplate.update(SQL.DELETE_SKEWED_VALUES, new SqlParameterValue(Types.BIGINT, ids.getSdsId()));
+            jdbcTemplate.update(SQL.DELETE_SERDE_PARAMS, new SqlParameterValue(Types.BIGINT, ids.getSerdeId()));
+            jdbcTemplate.update(SQL.DELETE_SERDES, new SqlParameterValue(Types.BIGINT, ids.getSerdeId()));
+            jdbcTemplate.update(SQL.DELETE_SDS, new SqlParameterValue(Types.BIGINT, ids.getSdsId()));
+            jdbcTemplate.update(SQL.DELETE_TBLS, new SqlParameterValue(Types.BIGINT, ids.getTableId()));
+        } catch (DataAccessException e) {
+            throw new ConnectorException(String.format("Failed delete hive table %s", tableName), e);
+        }
+    }
+
+    private TableSequenceIds getSequenceIds(final QualifiedName tableName) {
+        try {
+            return jdbcTemplate.queryForObject(
+                SQL.TABLE_SEQUENCE_IDS,
+                new Object[]{tableName.getDatabaseName(), tableName.getTableName()},
+                new int[]{Types.VARCHAR, Types.VARCHAR},
+                (rs, rowNum) -> new TableSequenceIds(rs.getLong("tbl_id"), rs.getLong("cd_id"),
+                    rs.getLong("sd_id"), rs.getLong("serde_id")));
+        } catch (EmptyResultDataAccessException e) {
+            throw new TableNotFoundException(tableName);
+        }
+    }
+
     @VisibleForTesting
     private static class SQL {
         static final String GET_TABLE_NAMES_BY_URI =
@@ -237,5 +291,26 @@ private static class SQL {
             "update TABLE_PARAMS set param_value=? WHERE tbl_id=? and param_key=?";
         static final String INSERT_TABLE_PARAMS =
             "insert into TABLE_PARAMS(tbl_id,param_key,param_value) values (?,?,?)";
+        static final String UPDATE_SDS_CD = "UPDATE SDS SET CD_ID=? WHERE SD_ID=?";
+        static final String DELETE_COLUMNS_V2 = "DELETE FROM COLUMNS_V2 WHERE CD_ID=?";
+        static final String DELETE_CDS = "DELETE FROM CDS WHERE CD_ID=?";
+        static final String DELETE_PARTITION_KEYS = "DELETE FROM PARTITION_KEYS WHERE TBL_ID=?";
+        static final String DELETE_TABLE_PARAMS = "DELETE FROM TABLE_PARAMS WHERE TBL_ID=?";
+        static final String DELETE_TAB_COL_STATS = "DELETE FROM TAB_COL_STATS WHERE TBL_ID=?";
+        static final String UPDATE_TABLE_SD = "UPDATE TBLS SET SD_ID=? WHERE TBL_ID=?";
+        static final String DELETE_SKEWED_COL_NAMES = "DELETE FROM SKEWED_COL_NAMES WHERE SD_ID=?";
+        static final String DELETE_BUCKETING_COLS = "DELETE FROM BUCKETING_COLS WHERE SD_ID=?";
+        static final String DELETE_SORT_COLS = "DELETE FROM SORT_COLS WHERE SD_ID=?";
+        static final String DELETE_SD_PARAMS = "DELETE FROM SD_PARAMS WHERE SD_ID=?";
+        static final String DELETE_SKEWED_COL_VALUE_LOC_MAP = "DELETE FROM SKEWED_COL_VALUE_LOC_MAP WHERE SD_ID=?";
+        static final String DELETE_SKEWED_VALUES = "DELETE FROM SKEWED_VALUES WHERE SD_ID_OID=?";
+        static final String UPDATE_SDS_SERDE = "UPDATE SDS SET SERDE_ID=? WHERE SD_ID=?";
+        static final String DELETE_SERDE_PARAMS = "DELETE FROM SERDE_PARAMS WHERE SERDE_ID=?";
+        static final String DELETE_SERDES = "DELETE FROM SERDES WHERE SERDE_ID=?";
+        static final String DELETE_SDS = "DELETE FROM SDS WHERE SD_ID=?";
+        static final String DELETE_TBLS = "DELETE FROM TBLS WHERE TBL_ID=?";
+        static final String TABLE_SEQUENCE_IDS = "select t.tbl_id, s.sd_id, s.cd_id, s.serde_id"
+            + " from DBS d join TBLS t on d.db_id=t.db_id join SDS s on t.sd_id=s.sd_id"
+            + " where d.name=? and t.tbl_name=?";
     }
 }

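Editor's note: the statement order in `delete` follows the metastore schema's foreign keys: the SDS row is first detached from its column descriptor and serde (UPDATE_SDS_CD, UPDATE_SDS_SERDE), TBLS.SD_ID is nulled before the SDS row itself is removed, and child tables (COLUMNS_V2, SD_PARAMS, SERDE_PARAMS, the skewed/bucketing/sort tables) are emptied before their parents (CDS, SDS, SERDES, finally TBLS). Note the updates run as individual jdbcTemplate calls; nothing in this hunk shows a surrounding transaction. A caller wanting all-or-nothing behavior could wrap the call itself, for example (a sketch only, not what the commit does; `hiveDataSource`, `directSqlTable`, and `tableName` are assumed to exist):

    import org.springframework.jdbc.datasource.DataSourceTransactionManager;
    import org.springframework.transaction.support.TransactionTemplate;

    // Hypothetical all-or-nothing wrapper around DirectSqlTable.delete.
    final TransactionTemplate tx =
        new TransactionTemplate(new DataSourceTransactionManager(hiveDataSource));
    tx.execute(status -> {
        directSqlTable.delete(tableName); // rolls back all updates if any statement fails
        return null;
    });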
metacat-connector-hive/src/main/java/com/netflix/metacat/connector/hive/sql/HiveConnectorFastTableService.java (+8)

@@ -152,4 +152,12 @@ private boolean isIcebergTable(final TableInfo tableInfo) {
             && DirectSqlTable.ICEBERG_TABLE_TYPE
             .equalsIgnoreCase(tableInfo.getMetadata().get(DirectSqlTable.PARAM_TABLE_TYPE));
     }
+
+    /**
+     * {@inheritDoc}.
+     */
+    @Override
+    public void delete(final ConnectorRequestContext requestContext, final QualifiedName name) {
+        directSqlTable.delete(name);
+    }
 }

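Editor's note: with this override, the fast table service answers `delete` itself through direct SQL instead of falling through to the base implementation's metastore path. From a caller's point of view nothing changes but the speed; a hypothetical invocation might look like this (the `QualifiedName.ofTable` factory and `requestContext` are assumed from Metacat's common and connector libraries):

    // Hypothetical call site; names and arguments are illustrative.
    final QualifiedName name =
        QualifiedName.ofTable("hive-metastore", "hsmoke_ddb", "test_create_table");
    tableService.delete(requestContext, name); // delegates to DirectSqlTable.delete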
metacat-connector-hive/src/main/java/com/netflix/metacat/connector/hive/sql/TableSequenceIds.java (+33 -2)

@@ -15,15 +15,46 @@
  */
 package com.netflix.metacat.connector.hive.sql;
 
-import lombok.Data;
+import lombok.Getter;
+
+import javax.annotation.Nullable;
 
 /**
  * Class representing the ids for a table.
  *
  * @author amajumdar
  */
-@Data
+@Getter
 public class TableSequenceIds {
     private final Long tableId;
     private final Long cdId;
+    private final Long sdsId;
+    private final Long serdeId;
+
+    /**
+     * Constructor.
+     * @param tableId table id
+     * @param cdId column id
+     */
+    public TableSequenceIds(final Long tableId,
+                            final Long cdId) {
+        this(tableId, cdId, null, null);
+    }
+
+    /**
+     * Constructor.
+     * @param tableId table id
+     * @param cdId column id
+     * @param sdsId sds id
+     * @param serdeId serde id
+     */
+    public TableSequenceIds(final Long tableId,
+                            final Long cdId,
+                            @Nullable final Long sdsId,
+                            @Nullable final Long serdeId) {
+        this.tableId = tableId;
+        this.cdId = cdId;
+        this.sdsId = sdsId;
+        this.serdeId = serdeId;
+    }
 }

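Editor's note: swapping Lombok's @Data for @Getter plus explicit constructors keeps the class immutable while supporting two arities. @Data only generates the required-args constructor, which with four final fields would be the four-argument form; pre-existing call sites presumably still need the two-argument shape, hence the delegating overload. A quick usage sketch under the new shape:

    // The two constructor forms added in this commit; the short form leaves
    // the storage-descriptor and serde ids null (hence @Nullable on those params).
    final TableSequenceIds partial = new TableSequenceIds(1L, 2L);      // sdsId == null, serdeId == null
    final TableSequenceIds full = new TableSequenceIds(1L, 2L, 3L, 4L); // as built by DirectSqlTable.getSequenceIds
    assert full.getSdsId() == 3L;    // accessors come from @Getter
    assert full.getSerdeId() == 4L;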
metacat-connector-hive/src/test/groovy/com/netflix/metacat/connector/hive/sql/DirectSqlTableSpec.groovy (+25 -1)

@@ -3,11 +3,15 @@ package com.netflix.metacat.connector.hive.sql
 import com.google.common.collect.Maps
 import com.netflix.metacat.common.QualifiedName
 import com.netflix.metacat.common.server.connectors.ConnectorContext
+import com.netflix.metacat.common.server.connectors.exception.ConnectorException
 import com.netflix.metacat.common.server.connectors.exception.InvalidMetaException
+import com.netflix.metacat.common.server.connectors.exception.TableNotFoundException
 import com.netflix.metacat.common.server.properties.DefaultConfigImpl
 import com.netflix.metacat.common.server.properties.MetacatProperties
 import com.netflix.metacat.connector.hive.util.HiveConnectorFastServiceMetric
 import com.netflix.spectator.api.NoopRegistry
+import org.springframework.dao.CannotAcquireLockException
+import org.springframework.dao.DataAccessException
 import org.springframework.dao.EmptyResultDataAccessException
 import org.springframework.jdbc.core.JdbcTemplate
 import spock.lang.Specification
@@ -25,7 +29,8 @@ class DirectSqlTableSpec extends Specification {
     def context = new ConnectorContext('test', 'test', 'hive', config, registry, Maps.newHashMap())
     def metric = new HiveConnectorFastServiceMetric(registry)
     def jdbcTemplate = Mock(JdbcTemplate)
-    def service = new DirectSqlTable(context, jdbcTemplate, metric)
+    def directSqlSavePartition = Mock(DirectSqlSavePartition)
+    def service = new DirectSqlTable(context, jdbcTemplate, metric, directSqlSavePartition)
     def catalogName = 'c'
     def databaseName = 'd'
     def tableName = 't'
@@ -120,4 +125,23 @@ class DirectSqlTableSpec extends Specification {
         1 * jdbcTemplate.update(DirectSqlTable.SQL.UPDATE_TABLE_PARAMS,_) >> {throw new Exception()}
         thrown(Exception)
     }
+
+    def "Test delete table"() {
+        when:
+        service.delete(qualifiedName)
+        then:
+        1 * jdbcTemplate.queryForObject(DirectSqlTable.SQL.TABLE_SEQUENCE_IDS,_,_,_) >> {throw new EmptyResultDataAccessException(1)}
+        thrown(TableNotFoundException)
+        when:
+        service.delete(qualifiedName)
+        then:
+        1 * jdbcTemplate.queryForObject(DirectSqlTable.SQL.TABLE_SEQUENCE_IDS,_,_,_) >> new TableSequenceIds(1,1,1,1)
+        1 * directSqlSavePartition.delete(qualifiedName) >> {throw new CannotAcquireLockException('a')}
+        thrown(ConnectorException)
+        when:
+        service.delete(qualifiedName)
+        then:
+        1 * jdbcTemplate.queryForObject(DirectSqlTable.SQL.TABLE_SEQUENCE_IDS,_,_,_) >> new TableSequenceIds(1,1,1,1)
+        noExceptionThrown()
+    }
 }

metacat-functional-tests/metacat-test-cluster/docker-compose.yml (+1)

@@ -53,6 +53,7 @@ services:
         -Dmetacat.type.converter=com.netflix.metacat.connector.pig.converters.PigTypeConverter
         -Dmetacat.definition.metadata.delete.enableForTable=false
         -Dmetacat.definition.metadata.delete.enableDeleteForQualifiedNames=hive-metastore/hsmoke_ddb,hive-metastore/hsmoke_ddb1/test_create_table1,cassandra-310,embedded-hive-metastore,embedded-fast-hive-metastore,s3-mysql-db,mysql-56-db
+        -Dmetacat.hive.metastore.batchSize=10
        -Dmetacat.usermetadata.config.location=/etc/metacat/usermetadata.properties'
     labels:
         - "com.netflix.metacat.oss.test"

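Editor's note: this one-line change gives the functional-test cluster a deliberately small batch size so the new batched partition delete is exercised across multiple rounds: with batchSize=10, the smoke-test rows below that save 15 partitions force at least two select-delete passes. The property value ends up in the `%d` placeholder of PARTITIONS_SELECT_ALL, roughly as sketched here (the actual plumbing goes through Metacat's config classes rather than a raw system-property read, and the "100" default is an assumption for illustration):

    // Sketch of how the property value reaches the query's LIMIT clause.
    final int batchSize =
        Integer.parseInt(System.getProperty("metacat.hive.metastore.batchSize", "100"));
    final String sql = String.format(
        "SELECT P.PART_ID, P.SD_ID, S.SERDE_ID FROM DBS D JOIN TBLS T ON D.DB_ID=T.DB_ID "
            + "JOIN PARTITIONS P ON T.TBL_ID=P.TBL_ID JOIN SDS S ON P.SD_ID=S.SD_ID "
            + "WHERE D.NAME=? and T.TBL_NAME=? limit %d", batchSize);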
metacat-functional-tests/src/functionalTest/groovy/com/netflix/metacat/MetacatSmokeSpec.groovy (+26 -8)

@@ -316,21 +316,30 @@
         given:
         def name = catalogName + '/' + databaseName + '/' + tableName
         createTable(catalogName, databaseName, tableName)
+        def partitions = PigDataDtoProvider.getPartitions(catalogName, databaseName, tableName, 'field1=xyz/field3=abc', isLocalEnv ? 'file:/tmp/abc' : null, count)
+        partitionApi.savePartitions(catalogName, databaseName, tableName, new PartitionsSaveRequestDto(partitions: partitions))
         api.deleteTable(catalogName, databaseName, tableName)
         def definitions = metadataApi.getDefinitionMetadataList(null, null, null, null, null, null, name,null)
         expect:
         definitions.size() == result
         cleanup:
         metadataApi.deleteDefinitionMetadata(name, true)
         where:
-        catalogName                    | databaseName | tableName            | result
-        'embedded-hive-metastore'      | 'smoke_ddb1' | 'test_create_table'  | 0
-        'embedded-fast-hive-metastore' | 'fsmoke_ddb1'| 'test_create_table'  | 0
-        'embedded-fast-hive-metastore' | 'shard'      | 'test_create_table'  | 0
-        'hive-metastore'               | 'hsmoke_ddb' | 'test_create_table'  | 0
-        'hive-metastore'               | 'hsmoke_ddb1'| 'test_create_table1' | 0
-        'hive-metastore'               | 'hsmoke_ddb1'| 'test_create_table2' | 1
-        's3-mysql-db'                  | 'smoke_ddb1' | 'test_create_table'  | 0
+        catalogName                    | databaseName | tableName            | count | result
+        'embedded-hive-metastore'      | 'smoke_ddb1' | 'test_create_table'  | 15    | 0
+        'embedded-fast-hive-metastore' | 'fsmoke_ddb1'| 'test_create_table'  | 15    | 0
+        'embedded-fast-hive-metastore' | 'shard'      | 'test_create_table'  | 15    | 0
+        'hive-metastore'               | 'hsmoke_ddb' | 'test_create_table'  | 15    | 0
+        'hive-metastore'               | 'hsmoke_ddb1'| 'test_create_table1' | 15    | 0
+        'hive-metastore'               | 'hsmoke_ddb1'| 'test_create_table2' | 15    | 1
+        's3-mysql-db'                  | 'smoke_ddb1' | 'test_create_table'  | 15    | 0
+        'embedded-hive-metastore'      | 'smoke_ddb1' | 'test_create_table'  | 10    | 0
+        'embedded-fast-hive-metastore' | 'fsmoke_ddb1'| 'test_create_table'  | 10    | 0
+        'embedded-fast-hive-metastore' | 'shard'      | 'test_create_table'  | 10    | 0
+        'hive-metastore'               | 'hsmoke_ddb' | 'test_create_table'  | 10    | 0
+        'hive-metastore'               | 'hsmoke_ddb1'| 'test_create_table1' | 10    | 0
+        'hive-metastore'               | 'hsmoke_ddb1'| 'test_create_table2' | 10    | 1
+        's3-mysql-db'                  | 'smoke_ddb1' | 'test_create_table'  | 10    | 0
     }
 
     @Unroll
@@ -957,6 +966,15 @@
         'hive-metastore'               | 'hsmoke_db5' | 'part' | 'one=xyz' | 10 | 0
         'hive-metastore'               | 'hsmoke_db5' | 'part' | 'one=xyz' | 10 | 10
         'hive-metastore'               | 'hsmoke_db5' | 'part' | 'one=xyz' | 10 | 5
+        'embedded-hive-metastore'      | 'smoke_db5'  | 'part' | 'one=xyz' | 15 | 0
+        'embedded-hive-metastore'      | 'smoke_db5'  | 'part' | 'one=xyz' | 15 | 15
+        'embedded-hive-metastore'      | 'smoke_db5'  | 'part' | 'one=xyz' | 15 | 5
+        'embedded-fast-hive-metastore' | 'fsmoke_db5' | 'part' | 'one=xyz' | 15 | 0
+        'embedded-fast-hive-metastore' | 'fsmoke_db5' | 'part' | 'one=xyz' | 15 | 15
+        'embedded-fast-hive-metastore' | 'fsmoke_db5' | 'part' | 'one=xyz' | 15 | 5
+        'hive-metastore'               | 'hsmoke_db5' | 'part' | 'one=xyz' | 15 | 0
+        'hive-metastore'               | 'hsmoke_db5' | 'part' | 'one=xyz' | 15 | 15
+        'hive-metastore'               | 'hsmoke_db5' | 'part' | 'one=xyz' | 15 | 5
     }
 
     @Unroll