public class DruidStorageHandler extends DefaultHiveMetaHook implements HiveStorageHandler
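DruidStorageHandler is the storage handler behind Druid-backed Hive tables, i.e. tables declared with STORED BY 'org.apache.hadoop.hive.druid.DruidStorageHandler'. As a rough illustration of how the handler is normally exercised, the sketch below issues such a DDL over Hive JDBC; the connection URL, credentials, table name, and datasource value are placeholders, and the exact TBLPROPERTIES required depend on the Hive/Druid setup.

```java
import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.Statement;

// Illustrative only: creates a Druid-backed table through HiveServer2.
// URL, credentials, table name, and the "wikipedia" datasource are placeholders.
public class CreateDruidTableSketch {
  public static void main(String[] args) throws Exception {
    String url = "jdbc:hive2://localhost:10000/default";   // placeholder HiveServer2 URL
    try (Connection conn = DriverManager.getConnection(url, "hive", "");
         Statement stmt = conn.createStatement()) {
      stmt.execute(
          "CREATE EXTERNAL TABLE druid_wiki "
        + "STORED BY 'org.apache.hadoop.hive.druid.DruidStorageHandler' "
        + "TBLPROPERTIES (\"druid.datasource\" = \"wikipedia\")");
    }
  }
}
```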
| Modifier and Type | Field and Description |
|---|---|
| protected static SessionState.LogHelper | console |
| static String | INTERMEDIATE_SEGMENT_DIR_NAME |
| protected static org.slf4j.Logger | LOG |
| static String | SEGMENTS_DESCRIPTOR_DIR_NAME |
|  | ALTER_TABLE_OPERATION_TYPE |

| Constructor and Description |
|---|
| DruidStorageHandler() |
| DruidStorageHandler(io.druid.metadata.SQLMetadataConnector connector, io.druid.metadata.MetadataStorageTablesConfig druidMetadataStorageTablesConfig) |
| Modifier and Type | Method and Description |
|---|---|
| void | commitCreateTable(org.apache.hadoop.hive.metastore.api.Table table) - Called after successfully adding a new table definition to the metastore during CREATE TABLE. |
| void | commitDropTable(org.apache.hadoop.hive.metastore.api.Table table, boolean deleteData) - Called after successfully removing a table definition from the metastore during DROP TABLE. |
| void | commitInsertTable(org.apache.hadoop.hive.metastore.api.Table table, boolean overwrite) - Called after an INSERT [OVERWRITE] statement is successfully executed. |
| void | configureInputJobCredentials(TableDesc tableDesc, Map<String,String> jobSecrets) - Called to give the storage handler a chance to populate secret keys into the job's credentials. |
| void | configureInputJobProperties(TableDesc tableDesc, Map<String,String> jobProperties) - Called to give the storage handler a chance to populate the JobContext.getConfiguration() with properties that may be needed by the handler's bundled artifacts (i.e. InputFormat, SerDe, etc.). |
| void | configureJobConf(TableDesc tableDesc, org.apache.hadoop.mapred.JobConf jobConf) - Called just before submitting a MapReduce job. |
| void | configureOutputJobProperties(TableDesc tableDesc, Map<String,String> jobProperties) - Called to give the storage handler a chance to populate the JobContext.getConfiguration() with properties that may be needed by the handler's bundled artifacts (i.e. InputFormat, SerDe, etc.). |
| void | configureTableJobProperties(TableDesc tableDesc, Map<String,String> jobProperties) - Deprecated. Use the configureInputJobProperties/configureOutputJobProperties methods instead. |
| protected void | deleteSegment(io.druid.timeline.DataSegment segment) |
| HiveAuthorizationProvider | getAuthorizationProvider() - Returns the implementation-specific authorization provider. |
| org.apache.hadoop.conf.Configuration | getConf() |
| static com.metamx.http.client.HttpClient | getHttpClient() |
| Class<? extends org.apache.hadoop.mapred.InputFormat> | getInputFormatClass() |
| org.apache.hadoop.hive.metastore.api.LockType | getLockType(WriteEntity writeEntity) |
| HiveMetaHook | getMetaHook() |
| Class<? extends org.apache.hadoop.mapred.OutputFormat> | getOutputFormatClass() |
| Class<? extends AbstractSerDe> | getSerDeClass() |
| StorageHandlerInfo | getStorageHandlerInfo(org.apache.hadoop.hive.metastore.api.Table table) - Used to fetch runtime information about the storage handler during a DESCRIBE EXTENDED statement. |
| String | getUniqueId() |
| protected List<io.druid.timeline.DataSegment> | loadAndCommitDruidSegments(org.apache.hadoop.hive.metastore.api.Table table, boolean overwrite, List<io.druid.timeline.DataSegment> segmentsToLoad) - Creates the segment metadata, moves the segments, then commits the segments' metadata to the Druid metadata store in one transaction (TxN). |
| protected String | makeStagingName() |
| void | preAlterTable(org.apache.hadoop.hive.metastore.api.Table table, org.apache.hadoop.hive.metastore.api.EnvironmentContext context) - Called before a table is altered in the metastore during ALTER TABLE. |
| void | preCreateTable(org.apache.hadoop.hive.metastore.api.Table table) - Called before a new table definition is added to the metastore during CREATE TABLE. |
| void | preDropTable(org.apache.hadoop.hive.metastore.api.Table table) - Called before a table definition is removed from the metastore during DROP TABLE. |
| void | preInsertTable(org.apache.hadoop.hive.metastore.api.Table table, boolean overwrite) - Called before the commit-insert method is called. |
| void | rollbackCreateTable(org.apache.hadoop.hive.metastore.api.Table table) - Called after a failure adding a new table definition to the metastore during CREATE TABLE. |
| void | rollbackDropTable(org.apache.hadoop.hive.metastore.api.Table table) - Called after a failure removing a table definition from the metastore during DROP TABLE. |
| void | rollbackInsertTable(org.apache.hadoop.hive.metastore.api.Table table, boolean overwrite) - Called in case the pre-insert or commit-insert step fails. |
| void | setConf(org.apache.hadoop.conf.Configuration conf) |
| String | toString() |
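The pre*/commit*/rollback* methods above implement the HiveMetaHook contract: Hive invokes the pre-hook before touching the metastore, then the commit hook on success or the rollback hook on failure. The sketch below only illustrates that documented call order for CREATE TABLE; it is not Hive's actual metastore client code, and MetastoreAction is a hypothetical placeholder for the metastore write itself.

```java
import org.apache.hadoop.hive.metastore.HiveMetaHook;
import org.apache.hadoop.hive.metastore.api.MetaException;
import org.apache.hadoop.hive.metastore.api.Table;

/** Illustration of the documented meta-hook call order for CREATE TABLE; not Hive's actual code. */
public class MetaHookLifecycleSketch {

  /** Hypothetical placeholder for the caller's own metastore write. */
  interface MetastoreAction {
    void run(Table table) throws MetaException;
  }

  static void createTableWithHook(HiveMetaHook hook, Table table, MetastoreAction addTable)
      throws MetaException {
    hook.preCreateTable(table);        // before the definition is added to the metastore
    try {
      addTable.run(table);             // the actual metastore write (not shown here)
    } catch (MetaException e) {
      hook.rollbackCreateTable(table); // after a failure adding the definition
      throw e;
    }
    hook.commitCreateTable(table);     // after the definition was added successfully
  }
}
```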
protected static final org.slf4j.Logger LOG
protected static final SessionState.LogHelper console
public static final String SEGMENTS_DESCRIPTOR_DIR_NAME
public static final String INTERMEDIATE_SEGMENT_DIR_NAME
public DruidStorageHandler()
public DruidStorageHandler(io.druid.metadata.SQLMetadataConnector connector,
io.druid.metadata.MetadataStorageTablesConfig druidMetadataStorageTablesConfig)
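The no-argument constructor is the one used when the handler is instantiated reflectively from a table's storage handler class name; the second constructor allows a Druid metadata connector to be supplied directly (for example, in tests). A minimal sketch of the reflective pattern, assuming the druid-handler and hive-exec jars are on the classpath (this is the general Hadoop idiom, not necessarily Hive's exact code path):

```java
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hive.ql.metadata.HiveStorageHandler;
import org.apache.hadoop.util.ReflectionUtils;

public class LoadHandlerSketch {
  public static void main(String[] args) throws Exception {
    Configuration conf = new Configuration();
    Class<?> cls = Class.forName("org.apache.hadoop.hive.druid.DruidStorageHandler");
    // ReflectionUtils.newInstance also calls setConf(conf) because the handler is Configurable.
    HiveStorageHandler handler =
        (HiveStorageHandler) ReflectionUtils.newInstance(cls, conf);
    System.out.println(handler.getInputFormatClass());
  }
}
```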
public Class<? extends org.apache.hadoop.mapred.InputFormat> getInputFormatClass()
Specified by: getInputFormatClass in interface HiveStorageHandler
Returns: InputFormat

public Class<? extends org.apache.hadoop.mapred.OutputFormat> getOutputFormatClass()
Specified by: getOutputFormatClass in interface HiveStorageHandler
Returns: OutputFormat

public Class<? extends AbstractSerDe> getSerDeClass()
Specified by: getSerDeClass in interface HiveStorageHandler
Returns: AbstractSerDe

public HiveMetaHook getMetaHook()
Specified by: getMetaHook in interface HiveStorageHandler

public HiveAuthorizationProvider getAuthorizationProvider()
Specified by: getAuthorizationProvider in interface HiveStorageHandler

public void configureInputJobProperties(TableDesc tableDesc, Map<String,String> jobProperties)
Specified by: configureInputJobProperties in interface HiveStorageHandler
Parameters:
tableDesc - descriptor for the table being accessed
jobProperties - receives properties copied or transformed from the table properties

public void configureInputJobCredentials(TableDesc tableDesc, Map<String,String> jobSecrets)
Specified by: configureInputJobCredentials in interface HiveStorageHandler

public void preCreateTable(org.apache.hadoop.hive.metastore.api.Table table) throws org.apache.hadoop.hive.metastore.api.MetaException
Specified by: preCreateTable in interface HiveMetaHook
Parameters: table - new table definition
Throws: org.apache.hadoop.hive.metastore.api.MetaException

public void rollbackCreateTable(org.apache.hadoop.hive.metastore.api.Table table)
Specified by: rollbackCreateTable in interface HiveMetaHook
Parameters: table - new table definition

public void commitCreateTable(org.apache.hadoop.hive.metastore.api.Table table) throws org.apache.hadoop.hive.metastore.api.MetaException
Specified by: commitCreateTable in interface HiveMetaHook
Parameters: table - new table definition
Throws: org.apache.hadoop.hive.metastore.api.MetaException

protected List<io.druid.timeline.DataSegment> loadAndCommitDruidSegments(org.apache.hadoop.hive.metastore.api.Table table, boolean overwrite, List<io.druid.timeline.DataSegment> segmentsToLoad) throws IOException, org.skife.jdbi.v2.exceptions.CallbackFailedException
Parameters:
table - Hive table
overwrite - true if it is an insert overwrite table
Throws:
org.apache.hadoop.hive.metastore.api.MetaException - if errors occur
IOException
org.skife.jdbi.v2.exceptions.CallbackFailedException

protected void deleteSegment(io.druid.timeline.DataSegment segment) throws io.druid.segment.loading.SegmentLoadingException
Throws: io.druid.segment.loading.SegmentLoadingException

public void preDropTable(org.apache.hadoop.hive.metastore.api.Table table)
Specified by: preDropTable in interface HiveMetaHook
Parameters: table - table definition

public void rollbackDropTable(org.apache.hadoop.hive.metastore.api.Table table)
Specified by: rollbackDropTable in interface HiveMetaHook
Parameters: table - table definition

public void commitDropTable(org.apache.hadoop.hive.metastore.api.Table table, boolean deleteData)
Specified by: commitDropTable in interface HiveMetaHook
Parameters:
table - table definition
deleteData - whether to delete data as well; this should typically be ignored in the case of an external table

public void commitInsertTable(org.apache.hadoop.hive.metastore.api.Table table, boolean overwrite) throws org.apache.hadoop.hive.metastore.api.MetaException
Specified by: commitInsertTable in class DefaultHiveMetaHook
Parameters:
table - table definition
overwrite - true if it is INSERT OVERWRITE
Throws: org.apache.hadoop.hive.metastore.api.MetaException

public void preInsertTable(org.apache.hadoop.hive.metastore.api.Table table, boolean overwrite)
Specified by: preInsertTable in class DefaultHiveMetaHook
Parameters:
table - table definition
overwrite - true if it is INSERT OVERWRITE

public void rollbackInsertTable(org.apache.hadoop.hive.metastore.api.Table table, boolean overwrite)
Specified by: rollbackInsertTable in class DefaultHiveMetaHook
Parameters:
table - table definition
overwrite - true if it is INSERT OVERWRITE
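preInsertTable, commitInsertTable, and rollbackInsertTable mirror the same pattern for INSERT [OVERWRITE]: the pre-hook runs before data is written, and exactly one of the commit or rollback hooks runs depending on the outcome. The sketch below is only an illustration of that order against the DefaultHiveMetaHook contract; writeData is a hypothetical placeholder for executing the statement itself.

```java
import org.apache.hadoop.hive.metastore.DefaultHiveMetaHook;
import org.apache.hadoop.hive.metastore.api.MetaException;
import org.apache.hadoop.hive.metastore.api.Table;

/** Illustration of the documented insert-hook call order; not Hive's actual execution path. */
public class InsertHookLifecycleSketch {

  static void insertWithHook(DefaultHiveMetaHook hook, Table table, boolean overwrite,
                             Runnable writeData) throws MetaException {
    try {
      hook.preInsertTable(table, overwrite);    // before the INSERT writes any data
      writeData.run();                          // placeholder for executing the INSERT itself
      hook.commitInsertTable(table, overwrite); // after the statement succeeded
    } catch (MetaException e) {
      // Per the class doc, rollbackInsertTable runs when the pre-insert or commit-insert step fails.
      hook.rollbackInsertTable(table, overwrite);
      throw e;
    }
  }
}
```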
public void configureOutputJobProperties(TableDesc tableDesc, Map<String,String> jobProperties)
Specified by: configureOutputJobProperties in interface HiveStorageHandler
Parameters:
tableDesc - descriptor for the table being accessed
jobProperties - receives properties copied or transformed from the table properties

public void configureTableJobProperties(TableDesc tableDesc, Map<String,String> jobProperties)
Specified by: configureTableJobProperties in interface HiveStorageHandler
Parameters:
tableDesc - descriptor for the table being accessed
jobProperties - receives properties copied or transformed from the table properties
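All of the configure*JobProperties hooks receive the table's properties through TableDesc and are expected to copy whatever the bundled InputFormat/OutputFormat/SerDe will need into the jobProperties map. The sketch below shows that generic copy step; the "druid." prefix and the sample keys are purely illustrative and are not a claim about which keys this handler actually propagates.

```java
import java.util.HashMap;
import java.util.Map;
import java.util.Properties;

public class JobPropertiesSketch {
  /** Copies table properties whose keys start with the given prefix into the job properties. */
  static void copyPrefixed(Properties tableProperties, Map<String, String> jobProperties,
                           String prefix) {
    for (String name : tableProperties.stringPropertyNames()) {
      if (name.startsWith(prefix)) {
        jobProperties.put(name, tableProperties.getProperty(name));
      }
    }
  }

  public static void main(String[] args) {
    Properties tableProps = new Properties();
    tableProps.setProperty("druid.datasource", "wikipedia"); // illustrative key/value
    tableProps.setProperty("comment", "not copied");
    Map<String, String> jobProps = new HashMap<>();
    copyPrefixed(tableProps, jobProps, "druid.");
    System.out.println(jobProps); // {druid.datasource=wikipedia}
  }
}
```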
public void configureJobConf(TableDesc tableDesc, org.apache.hadoop.mapred.JobConf jobConf)
Specified by: configureJobConf in interface HiveStorageHandler
Parameters:
tableDesc - descriptor for the table being accessed
jobConf - jobConf for MapReduce job

public void setConf(org.apache.hadoop.conf.Configuration conf)
Specified by: setConf in interface org.apache.hadoop.conf.Configurable

public org.apache.hadoop.conf.Configuration getConf()
Specified by: getConf in interface org.apache.hadoop.conf.Configurable

public org.apache.hadoop.hive.metastore.api.LockType getLockType(WriteEntity writeEntity)
Specified by: getLockType in interface HiveStorageHandler

public String getUniqueId()
protected String makeStagingName()
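getUniqueId() and makeStagingName() give each query a name under which its intermediate output can be staged without colliding with concurrent queries. The handler's actual naming scheme is not shown here; the sketch below is just one plausible way to derive such a per-query staging name, and the "druid_staging" prefix is a placeholder.

```java
import java.util.UUID;

public class StagingNameSketch {
  // Hypothetical: appends a random suffix so concurrent INSERTs get distinct staging directories.
  static String makeStagingName(String prefix) {
    return prefix + "_" + UUID.randomUUID().toString().replace("-", "");
  }

  public static void main(String[] args) {
    System.out.println(makeStagingName("druid_staging"));
  }
}
```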
public static com.metamx.http.client.HttpClient getHttpClient()
public void preAlterTable(org.apache.hadoop.hive.metastore.api.Table table, org.apache.hadoop.hive.metastore.api.EnvironmentContext context) throws org.apache.hadoop.hive.metastore.api.MetaException
Specified by: preAlterTable in interface HiveMetaHook
Parameters: table - new table definition
Throws: org.apache.hadoop.hive.metastore.api.MetaException

public StorageHandlerInfo getStorageHandlerInfo(org.apache.hadoop.hive.metastore.api.Table table) throws org.apache.hadoop.hive.metastore.api.MetaException
Specified by: getStorageHandlerInfo in interface HiveStorageHandler
Parameters: table - table definition
Throws: org.apache.hadoop.hive.metastore.api.MetaException

Copyright © 2019 The Apache Software Foundation. All Rights Reserved.