Modifier and Type | Method and Description |
---|---|
HashSet<ReadEntity> | QueryPlan.getInputs() |
Modifier and Type | Method and Description |
---|---|
void | QueryPlan.setInputs(HashSet<ReadEntity> inputs) |
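For orientation, a minimal sketch of how a caller might inspect the input set on a compiled plan. The `PlanInputsDump` class is illustrative, not part of Hive, and `getName()`/`getType()` are assumed from the generic `Entity` accessors.

```java
import java.util.HashSet;

import org.apache.hadoop.hive.ql.QueryPlan;
import org.apache.hadoop.hive.ql.hooks.ReadEntity;

// Illustrative helper (not part of Hive): print every entity a compiled
// QueryPlan reads, using the accessors from the Entity base class.
public final class PlanInputsDump {
  public static void dump(QueryPlan plan) {
    HashSet<ReadEntity> inputs = plan.getInputs();
    for (ReadEntity input : inputs) {
      System.out.println(input.getType() + " -> " + input.getName());
    }
  }
}
```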
Modifier and Type | Method and Description |
---|---|
Set<ReadEntity> | QueryResultsCache.QueryInfo.getInputs() |
Modifier and Type | Method and Description |
---|---|
void | QueryResultsCache.QueryInfo.setInputs(Set<ReadEntity> inputs) |
Constructor and Description |
---|
QueryInfo(long queryTime, QueryResultsCache.LookupInfo lookupInfo, HiveOperation hiveOperation, List<org.apache.hadoop.hive.metastore.api.FieldSchema> resultSchema, TableAccessInfo tableAccessInfo, ColumnAccessInfo columnAccessInfo, Set<ReadEntity> inputs) |
Modifier and Type | Method and Description |
---|---|
Set<ReadEntity> | HookContext.getInputs() |
Set<ReadEntity> | ReadEntity.getParents() |
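A minimal sketch of walking the parent chain: getParents() links an indirect input back to the view entity (or entities) it was read through. The `EntityWalker` helper is hypothetical.

```java
import org.apache.hadoop.hive.ql.hooks.ReadEntity;

// Hypothetical helper: print a ReadEntity and, indented beneath it, the
// parent view entities it was read through (if any).
public final class EntityWalker {
  public static void walk(ReadEntity entity, String indent) {
    System.out.println(indent + entity.getName());
    if (entity.getParents() != null) {
      for (ReadEntity parent : entity.getParents()) {
        walk(parent, indent + "  ");
      }
    }
  }
}
```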
Modifier and Type | Method and Description |
---|---|
void | PostExecutePrinter.run(QueryState queryState, Set<ReadEntity> inputs, Set<WriteEntity> outputs, LineageInfo linfo, org.apache.hadoop.security.UserGroupInformation ugi) |
void | PreExecutePrinter.run(QueryState queryState, Set<ReadEntity> inputs, Set<WriteEntity> outputs, org.apache.hadoop.security.UserGroupInformation ugi) |
void | EnforceReadOnlyTables.run(SessionState sess, Set<ReadEntity> inputs, Set<WriteEntity> outputs, org.apache.hadoop.security.UserGroupInformation ugi, boolean isExplain) |
void | HookContext.setInputs(Set<ReadEntity> inputs) |
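The printer hooks above receive the input and output sets as arguments; a custom hook can obtain the same sets through HookContext. A minimal sketch, assuming the standard ExecuteWithHookContext contract (such hooks are typically registered via hive.exec.post.hooks); the `AuditHook` name is illustrative.

```java
import org.apache.hadoop.hive.ql.hooks.ExecuteWithHookContext;
import org.apache.hadoop.hive.ql.hooks.HookContext;
import org.apache.hadoop.hive.ql.hooks.ReadEntity;
import org.apache.hadoop.hive.ql.hooks.WriteEntity;

// Illustrative post-execution hook: log every entity the query read or wrote.
public class AuditHook implements ExecuteWithHookContext {
  @Override
  public void run(HookContext context) throws Exception {
    for (ReadEntity input : context.getInputs()) {
      System.out.println("READ  " + input.getName());
    }
    for (WriteEntity output : context.getOutputs()) {
      System.out.println("WRITE " + output.getName());
    }
  }
}
```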
Constructor and Description |
---|
ReadEntity(Partition p, ReadEntity parent) |
ReadEntity(Partition p, ReadEntity parent, boolean isDirect) |
ReadEntity(Table t, ReadEntity parent) |
ReadEntity(Table t, ReadEntity parent, boolean isDirect) |
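The parent/isDirect constructors exist to model reads that happen through a view. A minimal sketch, assuming Table handles from org.apache.hadoop.hive.ql.metadata; the `ViewInputs` helper is hypothetical.

```java
import org.apache.hadoop.hive.ql.hooks.ReadEntity;
import org.apache.hadoop.hive.ql.metadata.Table;

// Hypothetical helper: build the entity pair for a base table read via a view.
public final class ViewInputs {
  public static ReadEntity throughView(Table viewTable, Table baseTable) {
    ReadEntity viewEntity = new ReadEntity(viewTable);
    // isDirect=false marks the base table as reachable only through the view
    return new ReadEntity(baseTable, viewEntity, false);
  }
}
```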
Modifier and Type | Method and Description |
---|---|
Set<ReadEntity> | GenMRProcContext.getInputs() Get the input set. |
Modifier and Type | Method and Description |
---|---|
static void | GenMapRedUtils.setMapWork(MapWork plan, ParseContext parseCtx, Set<ReadEntity> inputs, PrunedPartitionList partsList, TableScanOperator tsOp, String alias_id, HiveConf conf, boolean local) Initialize MapWork. |
Constructor and Description |
---|
GenMRProcContext(HiveConf conf, HashMap<Operator<? extends OperatorDesc>,Task<? extends Serializable>> opTaskMap, ParseContext parseCtx, List<Task<MoveWork>> mvTask, List<Task<? extends Serializable>> rootTasks, LinkedHashMap<Operator<? extends OperatorDesc>,GenMRProcContext.GenMapRedCtx> mapCurrCtx, Set<ReadEntity> inputs, Set<WriteEntity> outputs) |
Modifier and Type | Field and Description |
---|---|
protected HashSet<ReadEntity> | BaseSemanticAnalyzer.inputs ReadEntities that are passed to the hooks. |
Set<ReadEntity> | GenTezProcContext.inputs |
Set<ReadEntity> | OptimizeTezProcContext.inputs |
Modifier and Type | Method and Description |
---|---|
protected ReadEntity | BaseSemanticAnalyzer.toReadEntity(org.apache.hadoop.fs.Path location) |
static ReadEntity | BaseSemanticAnalyzer.toReadEntity(org.apache.hadoop.fs.Path location, HiveConf conf) |
protected ReadEntity | BaseSemanticAnalyzer.toReadEntity(String location) |
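A minimal sketch of the static overload, for registering a raw filesystem location as an input so that hooks and authorization see the path; the `LocationInput` wrapper is illustrative.

```java
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hive.conf.HiveConf;
import org.apache.hadoop.hive.ql.hooks.ReadEntity;
import org.apache.hadoop.hive.ql.parse.BaseSemanticAnalyzer;

// Illustrative wrapper: expose a filesystem location as a ReadEntity.
public final class LocationInput {
  public static ReadEntity forLocation(String location, HiveConf conf) {
    return BaseSemanticAnalyzer.toReadEntity(new Path(location), conf);
  }
}
```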
Modifier and Type | Method and Description |
---|---|
HashSet<ReadEntity> | BaseSemanticAnalyzer.getAllInputs() |
HashSet<ReadEntity> | SemanticAnalyzer.getAllInputs() |
HashSet<ReadEntity> | BaseSemanticAnalyzer.getInputs() |
Set<ReadEntity> | HiveSemanticAnalyzerHookContext.getInputs() The following methods will only be available to hooks executing postAnalyze. |
HashSet<ReadEntity> | EximUtil.SemanticAnalyzerWrapperContext.getInputs() |
Set<ReadEntity> | HiveSemanticAnalyzerHookContextImpl.getInputs() |
HashSet<ReadEntity> | ParseContext.getSemanticInputs() |
Map<String,ReadEntity> | ParseContext.getViewAliasToInput() |
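As the note above says, HiveSemanticAnalyzerHookContext.getInputs() is only populated for hooks executing postAnalyze. A minimal sketch, assuming the stock AbstractSemanticAnalyzerHook base class; the `InputLoggingHook` name is illustrative.

```java
import java.io.Serializable;
import java.util.List;

import org.apache.hadoop.hive.ql.exec.Task;
import org.apache.hadoop.hive.ql.hooks.ReadEntity;
import org.apache.hadoop.hive.ql.parse.AbstractSemanticAnalyzerHook;
import org.apache.hadoop.hive.ql.parse.HiveSemanticAnalyzerHookContext;

// Illustrative semantic-analysis hook: log the entities a query reads,
// after analysis has resolved them.
public class InputLoggingHook extends AbstractSemanticAnalyzerHook {
  @Override
  public void postAnalyze(HiveSemanticAnalyzerHookContext context,
      List<Task<? extends Serializable>> rootTasks) {
    for (ReadEntity input : context.getInputs()) {
      System.out.println("query reads: " + input.getName());
    }
  }
}
```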
Modifier and Type | Method and Description |
---|---|
void | TaskCompiler.compile(ParseContext pCtx, List<Task<? extends Serializable>> rootTasks, HashSet<ReadEntity> inputs, HashSet<WriteEntity> outputs) |
protected void | MapReduceCompiler.generateTaskTree(List<Task<? extends Serializable>> rootTasks, ParseContext pCtx, List<Task<MoveWork>> mvTask, Set<ReadEntity> inputs, Set<WriteEntity> outputs) |
protected void | TezCompiler.generateTaskTree(List<Task<? extends Serializable>> rootTasks, ParseContext pCtx, List<Task<MoveWork>> mvTask, Set<ReadEntity> inputs, Set<WriteEntity> outputs) |
protected abstract void | TaskCompiler.generateTaskTree(List<Task<? extends Serializable>> rootTasks, ParseContext pCtx, List<Task<MoveWork>> mvTask, Set<ReadEntity> inputs, Set<WriteEntity> outputs) |
protected void | TezCompiler.optimizeOperatorPlan(ParseContext pCtx, Set<ReadEntity> inputs, Set<WriteEntity> outputs) |
protected void | TaskCompiler.optimizeOperatorPlan(ParseContext pCtxSet, Set<ReadEntity> inputs, Set<WriteEntity> outputs) |
Modifier and Type | Method and Description |
---|---|
Task<? extends Serializable> | HiveAuthorizationTaskFactory.createCreateRoleTask(ASTNode node, HashSet<ReadEntity> inputs, HashSet<WriteEntity> outputs) |
Task<? extends Serializable> | HiveAuthorizationTaskFactoryImpl.createCreateRoleTask(ASTNode ast, HashSet<ReadEntity> inputs, HashSet<WriteEntity> outputs) |
Task<? extends Serializable> | HiveAuthorizationTaskFactory.createDropRoleTask(ASTNode node, HashSet<ReadEntity> inputs, HashSet<WriteEntity> outputs) |
Task<? extends Serializable> | HiveAuthorizationTaskFactoryImpl.createDropRoleTask(ASTNode ast, HashSet<ReadEntity> inputs, HashSet<WriteEntity> outputs) |
Task<? extends Serializable> | HiveAuthorizationTaskFactory.createGrantRoleTask(ASTNode node, HashSet<ReadEntity> inputs, HashSet<WriteEntity> outputs) |
Task<? extends Serializable> | HiveAuthorizationTaskFactoryImpl.createGrantRoleTask(ASTNode ast, HashSet<ReadEntity> inputs, HashSet<WriteEntity> outputs) |
Task<? extends Serializable> | HiveAuthorizationTaskFactory.createGrantTask(ASTNode node, HashSet<ReadEntity> inputs, HashSet<WriteEntity> outputs) |
Task<? extends Serializable> | HiveAuthorizationTaskFactoryImpl.createGrantTask(ASTNode ast, HashSet<ReadEntity> inputs, HashSet<WriteEntity> outputs) |
Task<? extends Serializable> | HiveAuthorizationTaskFactory.createRevokeRoleTask(ASTNode node, HashSet<ReadEntity> inputs, HashSet<WriteEntity> outputs) |
Task<? extends Serializable> | HiveAuthorizationTaskFactoryImpl.createRevokeRoleTask(ASTNode ast, HashSet<ReadEntity> inputs, HashSet<WriteEntity> outputs) |
Task<? extends Serializable> | HiveAuthorizationTaskFactory.createRevokeTask(ASTNode node, HashSet<ReadEntity> inputs, HashSet<WriteEntity> outputs) |
Task<? extends Serializable> | HiveAuthorizationTaskFactoryImpl.createRevokeTask(ASTNode ast, HashSet<ReadEntity> inputs, HashSet<WriteEntity> outputs) |
Task<? extends Serializable> | HiveAuthorizationTaskFactory.createSetRoleTask(String roleName, HashSet<ReadEntity> inputs, HashSet<WriteEntity> outputs) |
Task<? extends Serializable> | HiveAuthorizationTaskFactoryImpl.createSetRoleTask(String roleName, HashSet<ReadEntity> inputs, HashSet<WriteEntity> outputs) |
Task<? extends Serializable> | HiveAuthorizationTaskFactory.createShowCurrentRoleTask(HashSet<ReadEntity> inputs, HashSet<WriteEntity> outputs, org.apache.hadoop.fs.Path resFile) |
Task<? extends Serializable> | HiveAuthorizationTaskFactoryImpl.createShowCurrentRoleTask(HashSet<ReadEntity> inputs, HashSet<WriteEntity> outputs, org.apache.hadoop.fs.Path resFile) |
Task<? extends Serializable> | HiveAuthorizationTaskFactory.createShowGrantTask(ASTNode node, org.apache.hadoop.fs.Path resultFile, HashSet<ReadEntity> inputs, HashSet<WriteEntity> outputs) |
Task<? extends Serializable> | HiveAuthorizationTaskFactoryImpl.createShowGrantTask(ASTNode ast, org.apache.hadoop.fs.Path resultFile, HashSet<ReadEntity> inputs, HashSet<WriteEntity> outputs) |
Task<? extends Serializable> | HiveAuthorizationTaskFactory.createShowRoleGrantTask(ASTNode node, org.apache.hadoop.fs.Path resultFile, HashSet<ReadEntity> inputs, HashSet<WriteEntity> outputs) |
Task<? extends Serializable> | HiveAuthorizationTaskFactoryImpl.createShowRoleGrantTask(ASTNode ast, org.apache.hadoop.fs.Path resultFile, HashSet<ReadEntity> inputs, HashSet<WriteEntity> outputs) |
Task<? extends Serializable> | HiveAuthorizationTaskFactory.createShowRolePrincipalsTask(ASTNode ast, org.apache.hadoop.fs.Path resFile, HashSet<ReadEntity> inputs, HashSet<WriteEntity> outputs) |
Task<? extends Serializable> | HiveAuthorizationTaskFactoryImpl.createShowRolePrincipalsTask(ASTNode ast, org.apache.hadoop.fs.Path resFile, HashSet<ReadEntity> inputs, HashSet<WriteEntity> outputs) |
Task<? extends Serializable> | HiveAuthorizationTaskFactory.createShowRolesTask(ASTNode ast, org.apache.hadoop.fs.Path resFile, HashSet<ReadEntity> inputs, HashSet<WriteEntity> outputs) |
Task<? extends Serializable> | HiveAuthorizationTaskFactoryImpl.createShowRolesTask(ASTNode ast, org.apache.hadoop.fs.Path resFile, HashSet<ReadEntity> inputs, HashSet<WriteEntity> outputs) |
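Every factory method threads the same inputs/outputs sets through, so the caller's sets accumulate whatever entities the created task touches. A minimal sketch of that calling convention; the `ShowRolesPlanner` wrapper and its parameter names are hypothetical.

```java
import java.io.Serializable;
import java.util.HashSet;

import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hive.ql.exec.Task;
import org.apache.hadoop.hive.ql.hooks.ReadEntity;
import org.apache.hadoop.hive.ql.hooks.WriteEntity;
import org.apache.hadoop.hive.ql.parse.ASTNode;
import org.apache.hadoop.hive.ql.parse.SemanticException;
import org.apache.hadoop.hive.ql.parse.authorization.HiveAuthorizationTaskFactory;

// Illustrative wrapper around one factory call; the analyzer later passes the
// populated entity sets on to authorization checks and execution hooks.
public final class ShowRolesPlanner {
  public static Task<? extends Serializable> plan(
      HiveAuthorizationTaskFactory factory, ASTNode ast, Path resFile,
      HashSet<ReadEntity> inputs, HashSet<WriteEntity> outputs)
      throws SemanticException {
    return factory.createShowRolesTask(ast, resFile, inputs, outputs);
  }
}
```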
Modifier and Type | Field and Description |
---|---|
Set<ReadEntity> | TableExport.AuthEntities.inputs This is a concurrent set implementation. |
Modifier and Type | Method and Description |
---|---|
Set<ReadEntity> | MessageHandler.readEntities() |
Modifier and Type | Field and Description |
---|---|
Set<ReadEntity> | GenSparkProcContext.inputs |
Modifier and Type | Method and Description |
---|---|
Set<ReadEntity> | OptimizeSparkProcContext.getInputs() |
Modifier and Type | Method and Description |
---|---|
protected void | SparkCompiler.generateTaskTree(List<Task<? extends Serializable>> rootTasks, ParseContext pCtx, List<Task<MoveWork>> mvTask, Set<ReadEntity> inputs, Set<WriteEntity> outputs) TODO: need to turn on rules that are commented out and add more if necessary. |
protected void | SparkCompiler.optimizeOperatorPlan(ParseContext pCtx, Set<ReadEntity> inputs, Set<WriteEntity> outputs) |
Constructor and Description |
---|
GenSparkProcContext(HiveConf conf, ParseContext parseContext, List<Task<MoveWork>> moveTask, List<Task<? extends Serializable>> rootTasks, Set<ReadEntity> inputs, Set<WriteEntity> outputs, Map<String,TableScanOperator> topOps) |
OptimizeSparkProcContext(HiveConf conf, ParseContext parseContext, Set<ReadEntity> inputs, Set<WriteEntity> outputs) |
Modifier and Type | Field and Description |
---|---|
protected HashSet<ReadEntity> | DDLWork.inputs ReadEntities that are passed to the hooks. |
protected HashSet<ReadEntity> | MoveWork.inputs ReadEntities that are passed to the hooks. |
Modifier and Type | Method and Description |
---|---|
static ReadEntity | PlanUtils.addInput(Set<ReadEntity> inputs, ReadEntity newInput) |
static ReadEntity | PlanUtils.addInput(Set<ReadEntity> inputs, ReadEntity newInput, boolean mergeIsDirectFlag) |
static ReadEntity | PlanUtils.getParentViewInfo(String alias_id, Map<String,ReadEntity> viewAliasToInput) |
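addInput returns the entity actually retained in the set, which matters when the same table is reached twice (for example, once directly and once through a view). A minimal sketch; the `InputCollector` helper is hypothetical, and the `setDirect` accessor for the isDirect flag is an assumption.

```java
import java.util.Set;

import org.apache.hadoop.hive.ql.hooks.ReadEntity;
import org.apache.hadoop.hive.ql.metadata.Table;
import org.apache.hadoop.hive.ql.plan.PlanUtils;

// Hypothetical helper: record a table read, letting addInput deduplicate and
// (with mergeIsDirectFlag=true) merge the direct/indirect marker on a repeat.
public final class InputCollector {
  public static ReadEntity record(Set<ReadEntity> inputs, Table table, boolean direct) {
    ReadEntity candidate = new ReadEntity(table);
    candidate.setDirect(direct); // assumed accessor for the isDirect flag
    return PlanUtils.addInput(inputs, candidate, true);
  }
}
```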
Modifier and Type | Method and Description |
---|---|
HashSet<ReadEntity> | DDLWork.getInputs() |
HashSet<ReadEntity> | MoveWork.getInputs() |
HashSet<ReadEntity> | ExplainWork.getInputs() |
Modifier and Type | Method and Description |
---|---|
static ReadEntity | PlanUtils.addInput(Set<ReadEntity> inputs, ReadEntity newInput) |
static ReadEntity | PlanUtils.addInput(Set<ReadEntity> inputs, ReadEntity newInput, boolean mergeIsDirectFlag) |
static void | PlanUtils.addPartitionInputs(Collection<Partition> parts, Collection<ReadEntity> inputs, ReadEntity parentViewInfo, boolean isDirectRead) |
Task<? extends Serializable> | ImportTableDesc.getCreateTableTask(HashSet<ReadEntity> inputs, HashSet<WriteEntity> outputs, HiveConf conf) |
static ReadEntity | PlanUtils.getParentViewInfo(String alias_id, Map<String,ReadEntity> viewAliasToInput) |
void | DDLWork.setInputs(HashSet<ReadEntity> inputs) |
void | MoveWork.setInputs(HashSet<ReadEntity> inputs) |
void | ExplainWork.setInputs(HashSet<ReadEntity> inputs) |
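The work objects above carry the analyzer's read set so that execution hooks can see it. A minimal sketch of that hand-off; the `WorkWiring` class is hypothetical.

```java
import java.util.HashSet;

import org.apache.hadoop.hive.ql.hooks.ReadEntity;
import org.apache.hadoop.hive.ql.plan.DDLWork;

// Hypothetical wiring: attach the analyzer-collected read set to a work
// object; hooks later read it back via DDLWork.getInputs().
public final class WorkWiring {
  public static void attachInputs(DDLWork work, HashSet<ReadEntity> analyzerInputs) {
    work.setInputs(analyzerInputs);
  }
}
```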