FE Cleanup: Use analyzeNoThrow() and investigate Expr.substitute() TODO.

I looked at all the places that call Expr.substitute() or Expr.substituteList()
and concluded that none of them wants preserveRootType, so I simply
removed the TODO.

Change-Id: I7c98e2aee8f11b14781ec49840e82e69adc6b751
Reviewed-on: http://gerrit.sjc.cloudera.com:8080/4841
Tested-by: jenkins
Reviewed-by: Alex Behm <alex.behm@cloudera.com>
Tested-by: Alex Behm <alex.behm@cloudera.com>
This commit is contained in:
Alex Behm
2014-10-10 14:52:13 -07:00
committed by ishaan
parent 0cb8ba98ec
commit a4ee0a65f4
13 changed files with 45 additions and 106 deletions

View File

@@ -122,7 +122,7 @@ public class AggregateInfo extends AggregateInfoBase {
static public AggregateInfo create(
ArrayList<Expr> groupingExprs, ArrayList<FunctionCallExpr> aggExprs,
TupleDescriptor tupleDesc, Analyzer analyzer)
throws AnalysisException, InternalException {
throws AnalysisException {
Preconditions.checkState(
(groupingExprs != null && !groupingExprs.isEmpty())
|| (aggExprs != null && !aggExprs.isEmpty()));
@@ -188,7 +188,7 @@ public class AggregateInfo extends AggregateInfoBase {
private void createDistinctAggInfo(
ArrayList<Expr> origGroupingExprs,
ArrayList<FunctionCallExpr> distinctAggExprs, Analyzer analyzer)
throws AnalysisException, InternalException {
throws AnalysisException {
Preconditions.checkState(!distinctAggExprs.isEmpty());
// make sure that all DISTINCT params are the same;
// ignore top-level implicit casts in the comparison, we might have inserted
@@ -319,7 +319,7 @@ public class AggregateInfo extends AggregateInfoBase {
* The returned AggregateInfo shares its descriptor and smap with the input info;
* createAggTupleDesc() must not be called on it.
*/
private void createMergeAggInfo(Analyzer analyzer) throws InternalException {
private void createMergeAggInfo(Analyzer analyzer) {
Preconditions.checkState(mergeAggInfo_ == null);
TupleDescriptor inputDesc = intermediateTupleDesc_;
// construct grouping exprs
@@ -338,13 +338,7 @@ public class AggregateInfo extends AggregateInfoBase {
new SlotRef(inputDesc.getSlots().get(i + getGroupingExprs().size()));
FunctionCallExpr aggExpr = FunctionCallExpr.createMergeAggCall(
inputExpr, Lists.newArrayList(aggExprParam));
try {
aggExpr.analyze(analyzer);
} catch (Exception e) {
// we shouldn't see this
throw new InternalException(
"error constructing merge aggregation node: " + e.getMessage());
}
aggExpr.analyzeNoThrow(analyzer);
aggExprs.add(aggExpr);
}
@@ -404,7 +398,7 @@ public class AggregateInfo extends AggregateInfoBase {
private void createSecondPhaseAggInfo(
ArrayList<Expr> origGroupingExprs,
ArrayList<FunctionCallExpr> distinctAggExprs, Analyzer analyzer)
throws AnalysisException, InternalException {
throws AnalysisException {
Preconditions.checkState(secondPhaseDistinctAggInfo_ == null);
Preconditions.checkState(!distinctAggExprs.isEmpty());
// The output of the 1st phase agg is the 1st phase intermediate.
@@ -426,12 +420,7 @@ public class AggregateInfo extends AggregateInfoBase {
origGroupingExprs.size() + inputExpr.getChildren().size() - 1,
inputDesc.getSlots());
Preconditions.checkNotNull(ifExpr);
try {
ifExpr.analyze(analyzer);
} catch (Exception e) {
throw new InternalException("Failed to analyze 'IF' function " +
"in second phase count distinct aggregation.", e);
}
ifExpr.analyzeNoThrow(analyzer);
aggExpr = new FunctionCallExpr("count", Lists.newArrayList(ifExpr));
} else {
// SUM(DISTINCT <expr>) -> SUM(<last grouping slot>);
@@ -460,14 +449,8 @@ public class AggregateInfo extends AggregateInfoBase {
secondPhaseAggExprs.size() == aggregateExprs_.size() + distinctAggExprs.size());
for (FunctionCallExpr aggExpr: secondPhaseAggExprs) {
try {
aggExpr.analyze(analyzer);
Preconditions.checkState(aggExpr.isAggregateFunction());
} catch (Exception e) {
// we shouldn't see this
throw new InternalException(
"error constructing merge aggregation node", e);
}
aggExpr.analyzeNoThrow(analyzer);
Preconditions.checkState(aggExpr.isAggregateFunction());
}
ArrayList<Expr> substGroupingExprs =

View File

@@ -830,13 +830,6 @@ public class Analyzer {
BinaryPredicate p = new BinaryPredicate(BinaryPredicate.Operator.EQ, lhs, rhs);
p.setIsAuxExpr();
LOG.trace("register equiv predicate: " + p.toSql() + " " + p.debugString());
try {
// create casts if needed
p.analyze(this);
} catch (ImpalaException e) {
// not an executable predicate; ignore
return;
}
registerConjunct(p);
}
@@ -847,7 +840,7 @@ public class Analyzer {
BinaryPredicate pred = new BinaryPredicate(BinaryPredicate.Operator.EQ,
new SlotRef(globalState_.descTbl.getSlotDesc(lhsSlotId)),
new SlotRef(globalState_.descTbl.getSlotDesc(rhsSlotId)));
// analyze() creates casts, if needed
// create casts if needed
pred.analyzeNoThrow(this);
return pred;
}
@@ -1204,13 +1197,13 @@ public class Analyzer {
}
/**
* For each equivalence class, adds/removes predicates from conjuncts such that
* it contains a minimum set of <lhsSlot> = <rhsSlot> predicates that establish
* the known equivalences between slots in lhsTids and rhsTid. Preserves original
* conjuncts when possible. Assumes that predicates for establishing equivalences
* among slots in only lhsTids and only rhsTid have already been established.
* This function adds the remaining predicates to "connect" the disjoint equivalent
* slot sets of lhsTids and rhsTid.
* For each equivalence class, adds/removes predicates from conjuncts such that it
* contains a minimum set of <lhsSlot> = <rhsSlot> predicates that establish the known
* equivalences between slots in lhsTids and rhsTid (lhsTids must not contain rhsTid).
* Preserves original conjuncts when possible. Assumes that predicates for establishing
* equivalences among slots in only lhsTids and only rhsTid have already been
established. This function adds the remaining predicates to "connect" the disjoint
* equivalent slot sets of lhsTids and rhsTid.
* The intent of this function is to enable construction of a minimum spanning tree
* to cover the known slot equivalences. This function should be called for join
* nodes during plan generation to (1) remove redundant join predicates, and (2)
@@ -1340,10 +1333,8 @@ public class Analyzer {
EquivalenceClassId secondEqClassId = getEquivClassId(eqSlots.second);
// slots may not be in the same eq class due to outer joins
if (!firstEqClassId.equals(secondEqClassId)) continue;
if (!partialEquivSlots.union(eqSlots.first, eqSlots.second)) {
// conjunct is redundant
conjunctIter.remove();
}
// update equivalences and remove redundant conjuncts
if (!partialEquivSlots.union(eqSlots.first, eqSlots.second)) conjunctIter.remove();
}
// Suppose conjuncts had these predicates belonging to equivalence classes e1 and e2:
// e1: s1 = s2, s3 = s4, s3 = s5
@@ -1492,19 +1483,14 @@ public class Analyzer {
p.collect(Predicates.instanceOf(SlotRef.class), slotRefs);
Expr nullTuplePred = null;
try {
// Map for substituting SlotRefs with NullLiterals.
ExprSubstitutionMap nullSmap = new ExprSubstitutionMap();
NullLiteral nullLiteral = new NullLiteral();
nullLiteral.analyze(this);
for (SlotRef slotRef: slotRefs) {
nullSmap.put(slotRef.clone(), nullLiteral.clone());
}
nullTuplePred = p.substitute(nullSmap, this, false);
} catch (Exception e) {
Preconditions.checkState(false, "Failed to analyze generated predicate: "
+ nullTuplePred.toSql() + "." + e.getMessage());
// Map for substituting SlotRefs with NullLiterals.
ExprSubstitutionMap nullSmap = new ExprSubstitutionMap();
NullLiteral nullLiteral = new NullLiteral();
nullLiteral.analyzeNoThrow(this);
for (SlotRef slotRef: slotRefs) {
nullSmap.put(slotRef.clone(), nullLiteral.clone());
}
nullTuplePred = p.substitute(nullSmap, this, false);
try {
return FeSupport.EvalPredicate(nullTuplePred, getQueryCtx());
} catch (InternalException e) {

View File

@@ -672,7 +672,6 @@ abstract public class Expr extends TreeNode<Expr> implements ParseNode, Cloneabl
* If smap is null, this function is equivalent to clone().
* If preserveRootType is true, the resulting expr tree will be cast if necessary to
* the type of 'this'.
* TODO: set preserveRootType to true in more places?
*/
public Expr trySubstitute(ExprSubstitutionMap smap, Analyzer analyzer,
boolean preserveRootType)

View File

@@ -241,14 +241,14 @@ public class InlineViewRef extends TableRef {
}
protected void makeOutputNullableHelper(Analyzer analyzer, ExprSubstitutionMap smap)
throws InternalException, AnalysisException {
throws InternalException {
// Gather all unique rhs SlotRefs into rhsSlotRefs
List<SlotRef> rhsSlotRefs = Lists.newArrayList();
TreeNode.collect(smap.getRhs(), Predicates.instanceOf(SlotRef.class), rhsSlotRefs);
// Map for substituting SlotRefs with NullLiterals.
ExprSubstitutionMap nullSMap = new ExprSubstitutionMap();
NullLiteral nullLiteral = new NullLiteral();
nullLiteral.analyze(analyzer);
nullLiteral.analyzeNoThrow(analyzer);
for (SlotRef rhsSlotRef: rhsSlotRefs) {
nullSMap.put(rhsSlotRef.clone(), nullLiteral.clone());
}
@@ -261,7 +261,7 @@ public class InlineViewRef extends TableRef {
params.add(new NullLiteral());
params.add(smap.getRhs().get(i));
Expr ifExpr = new FunctionCallExpr("if", params);
ifExpr.analyze(analyzer);
ifExpr.analyzeNoThrow(analyzer);
smap.getRhs().set(i, ifExpr);
}
}
@@ -272,7 +272,7 @@ public class InlineViewRef extends TableRef {
* false otherwise.
*/
private boolean requiresNullWrapping(Analyzer analyzer, Expr expr,
ExprSubstitutionMap nullSMap) throws InternalException, AnalysisException {
ExprSubstitutionMap nullSMap) throws InternalException {
// If the expr is already wrapped in an IF(TupleIsNull(), NULL, expr)
// then do not try to execute it.
// TODO: return true in this case?
@@ -284,7 +284,7 @@ public class InlineViewRef extends TableRef {
new IsNullPredicate(expr.substitute(nullSMap, analyzer, false), true);
Preconditions.checkState(isNotNullLiteralPred.isConstant());
// analyze to insert casts, etc.
isNotNullLiteralPred.analyze(analyzer);
isNotNullLiteralPred.analyzeNoThrow(analyzer);
return FeSupport.EvalPredicate(isNotNullLiteralPred, analyzer.getQueryCtx());
}

View File

@@ -16,7 +16,6 @@ package com.cloudera.impala.analysis;
import java.io.IOException;
import java.util.ArrayList;
import java.util.EnumSet;
import java.util.List;
import java.util.Set;
@@ -35,7 +34,6 @@ import com.cloudera.impala.catalog.View;
import com.cloudera.impala.common.AnalysisException;
import com.cloudera.impala.common.FileSystemUtil;
import com.cloudera.impala.planner.DataSink;
import com.cloudera.impala.thrift.THdfsFileFormat;
import com.google.common.base.Joiner;
import com.google.common.base.Preconditions;
import com.google.common.collect.Lists;
@@ -48,10 +46,6 @@ import com.google.common.collect.Sets;
public class InsertStmt extends StatementBase {
private final static Logger LOG = LoggerFactory.getLogger(InsertStmt.class);
// Insert formats currently supported by Impala.
private final static EnumSet<THdfsFileFormat> SUPPORTED_INSERT_FORMATS =
EnumSet.of(THdfsFileFormat.PARQUET, THdfsFileFormat.TEXT);
// List of inline views that may be referenced in queryStmt.
private final WithClause withClause_;

View File

@@ -38,28 +38,19 @@ public class NullLiteral extends LiteralExpr {
*/
public static NullLiteral create(Type type) {
NullLiteral l = new NullLiteral();
try {
l.analyze(null);
} catch (Exception e) {
Preconditions.checkState(false, "NullLiteral failed to analyze!", e);
return null;
}
l.analyzeNoThrow(null);
l.uncheckedCastTo(type);
return l;
}
@Override
public boolean equals(Object obj) {
if (!super.equals(obj)) {
return false;
}
if (!super.equals(obj)) return false;
return obj instanceof NullLiteral;
}
@Override
public String toSqlImpl() {
return getStringValue();
}
public String toSqlImpl() { return getStringValue(); }
@Override
public String debugString() {
@@ -67,9 +58,7 @@ public class NullLiteral extends LiteralExpr {
}
@Override
public String getStringValue() {
return "NULL";
}
public String getStringValue() { return "NULL"; }
@Override
protected Expr uncheckedCastTo(Type targetType) {

View File

@@ -24,7 +24,6 @@ import org.slf4j.LoggerFactory;
import com.cloudera.impala.catalog.Column;
import com.cloudera.impala.common.AnalysisException;
import com.cloudera.impala.common.ColumnAliasGenerator;
import com.cloudera.impala.common.InternalException;
import com.cloudera.impala.common.TableAliasGenerator;
import com.cloudera.impala.common.TreeNode;
import com.google.common.base.Preconditions;
@@ -540,15 +539,11 @@ public class SelectStmt extends QueryStmt {
// ii) Other DISTINCT aggregates are present.
ExprSubstitutionMap countAllMap = createCountAllMap(aggExprs, analyzer);
countAllMap = ExprSubstitutionMap.compose(ndvSmap, countAllMap, analyzer);
List<Expr> substitutedAggs = Expr.substituteList(aggExprs, countAllMap, analyzer, false);
List<Expr> substitutedAggs =
Expr.substituteList(aggExprs, countAllMap, analyzer, false);
aggExprs.clear();
TreeNode.collect(substitutedAggs, Expr.isAggregatePredicate(), aggExprs);
try {
createAggInfo(groupingExprsCopy, aggExprs, analyzer);
} catch (InternalException e) {
// should never happen
Preconditions.checkArgument(false);
}
createAggInfo(groupingExprsCopy, aggExprs, analyzer);
// combine avg smap with the one that produces the final agg output
AggregateInfo finalAggInfo =
@@ -665,7 +660,7 @@ public class SelectStmt extends QueryStmt {
*/
private void createAggInfo(ArrayList<Expr> groupingExprs,
ArrayList<FunctionCallExpr> aggExprs, Analyzer analyzer)
throws AnalysisException, InternalException {
throws AnalysisException {
if (selectList_.isDistinct()) {
// Create aggInfo for SELECT DISTINCT ... stmt:
// - all select list items turn into grouping exprs

View File

@@ -311,9 +311,6 @@ public class UnionStmt extends QueryStmt {
} catch (AnalysisException e) {
// this should never happen
throw new AnalysisException("error creating agg info in UnionStmt.analyze()");
} catch (InternalException e) {
// this should never happen
throw new AnalysisException("error creating agg info in UnionStmt.analyze()");
}
}
}

View File

@@ -39,7 +39,6 @@ import com.cloudera.impala.analysis.SlotRef;
import com.cloudera.impala.analysis.SortInfo;
import com.cloudera.impala.analysis.TupleDescriptor;
import com.cloudera.impala.analysis.TupleId;
import com.cloudera.impala.common.AnalysisException;
import com.cloudera.impala.common.IdGenerator;
import com.cloudera.impala.common.ImpalaException;
import com.cloudera.impala.thrift.TPartitionType;
@@ -414,11 +413,7 @@ public class AnalyticPlanner {
ExprSubstitutionMap bufferedSmap) {
Preconditions.checkState(!exprs.isEmpty());
Expr result = createNullMatchingEqualsAux(exprs, 0, inputTid, bufferedSmap);
try {
result.analyze(analyzer_);
} catch (AnalysisException e) {
throw new IllegalStateException(e);
}
result.analyzeNoThrow(analyzer_);
return result;
}

View File

@@ -423,7 +423,7 @@ abstract public class PlanNode extends TreeNode<PlanNode> {
* Sets outputSmap_ to compose(existing smap, combined child smap). Also
* substitutes conjuncts_ using the combined child smap.
*/
protected void createDefaultSmap(Analyzer analyzer) throws InternalException {
protected void createDefaultSmap(Analyzer analyzer) {
ExprSubstitutionMap combinedChildSmap = getCombinedChildSmap();
outputSmap_ =
ExprSubstitutionMap.compose(outputSmap_, combinedChildSmap, analyzer);

View File

@@ -2051,8 +2051,10 @@ public class Planner {
UnionNode allMerge =
createUnionPlan(analyzer, unionStmt, unionStmt.getAllOperands());
// for unionStmt, baseTblResultExprs = resultExprs
if (result != null) allMerge.addChild(result,
unionStmt.getDistinctAggInfo().getGroupingExprs());
if (result != null) {
allMerge.addChild(result,
unionStmt.getDistinctAggInfo().getGroupingExprs());
}
result = allMerge;
}

View File

@@ -19,8 +19,6 @@ import static org.junit.Assert.fail;
import java.lang.reflect.Field;
import junit.framework.Assert;
import org.junit.Test;
import com.cloudera.impala.catalog.PrimitiveType;

View File

@@ -349,6 +349,7 @@ public class PlannerTest {
}
} catch (ImpalaException e) {
if (e instanceof AnalysisException) {
e.printStackTrace();
errorLog.append(
"query:\n" + query + "\nanalysis error: " + e.getMessage() + "\n");
return;