Impala
Impala is the open source, native analytic database for Apache Hadoop.
 All Classes Namespaces Files Functions Variables Typedefs Enumerations Enumerator Friends Macros
AnalyzerTest.java
Go to the documentation of this file.
1 // Copyright (c) 2012 Cloudera, Inc. All rights reserved.
2 //
3 // Licensed under the Apache License, Version 2.0 (the "License");
4 // you may not use this file except in compliance with the License.
5 // You may obtain a copy of the License at
6 //
7 // http://www.apache.org/licenses/LICENSE-2.0
8 //
9 // Unless required by applicable law or agreed to in writing, software
10 // distributed under the License is distributed on an "AS IS" BASIS,
11 // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12 // See the License for the specific language governing permissions and
13 // limitations under the License.
14 
15 package com.cloudera.impala.analysis;
16 
17 import static org.junit.Assert.assertNotNull;
18 import static org.junit.Assert.fail;
19 
20 import java.io.StringReader;
21 import java.util.ArrayList;
22 import java.util.HashMap;
23 import java.util.List;
24 import java.util.Map;
25 
26 import junit.framework.Assert;
27 
28 import org.junit.After;
29 import org.junit.Test;
30 import org.slf4j.Logger;
31 import org.slf4j.LoggerFactory;
32 
49 import com.cloudera.impala.thrift.TExpr;
50 import com.cloudera.impala.thrift.TQueryCtx;
51 import com.cloudera.impala.thrift.TQueryOptions;
52 import com.google.common.base.Joiner;
53 import com.google.common.base.Preconditions;
54 import com.google.common.collect.Lists;
55 
public class AnalyzerTest {
  protected final static Logger LOG = LoggerFactory.getLogger(AnalyzerTest.class);

  // Catalog shared by all tests in this class (and subclasses).
  protected static ImpaladCatalog catalog_ = new ImpaladTestCatalog();

  // Test-local list of test databases and tables. These are cleaned up in @After.
  protected final List<Db> testDbs_ = Lists.newArrayList();
  protected final List<Table> testTables_ = Lists.newArrayList();

  // Analyzer used by the most recent AnalyzesOk() call; tests inspect it afterwards
  // (e.g. via analyzer_.getDescTbl()).
  protected Analyzer analyzer_;

  // maps from type to string that will result in literal of that type
  protected static Map<ScalarType, String> typeToLiteralValue_ =
      new HashMap<ScalarType, String>();
  static {
    typeToLiteralValue_.put(Type.BOOLEAN, "true");
    typeToLiteralValue_.put(Type.TINYINT, "1");
    // Literals one past the max of the next-smaller type force the wider type.
    typeToLiteralValue_.put(Type.SMALLINT, (Byte.MAX_VALUE + 1) + "");
    typeToLiteralValue_.put(Type.INT, (Short.MAX_VALUE + 1) + "");
    typeToLiteralValue_.put(Type.BIGINT,
        ((long) Integer.MAX_VALUE + 1) + "");
    typeToLiteralValue_.put(Type.FLOAT, "cast(1.0 as float)");
    typeToLiteralValue_.put(Type.DOUBLE,
        "cast(" + (Float.MAX_VALUE + 1) + " as double)");
    typeToLiteralValue_.put(Type.TIMESTAMP,
        "cast('2012-12-21 00:00:00.000' as timestamp)");
    typeToLiteralValue_.put(Type.STRING, "'Hello, World!'");
    typeToLiteralValue_.put(Type.NULL, "NULL");
  }
84 
  // Creates an analyzer whose default database is defaultDb, running as the
  // current OS user.
  protected Analyzer createAnalyzer(String defaultDb) {
    TQueryCtx queryCtx =
        TestUtils.createQueryContext(defaultDb, System.getProperty("user.name"));
    // NOTE(review): the trailing constructor argument(s) on the next line were
    // dropped by the documentation extraction -- restore from the original file.
    return new Analyzer(catalog_, queryCtx,
  }

  // Creates an analyzer using the given query options.
  protected Analyzer createAnalyzer(TQueryOptions queryOptions) {
    TQueryCtx queryCtx = TestUtils.createQueryContext();
    queryCtx.request.query_options = queryOptions;
    // NOTE(review): trailing constructor argument(s) lost in extraction here too.
    return new Analyzer(catalog_, queryCtx,
  }

  // NOTE(review): the enclosing method's signature was lost in extraction; the
  // surviving body enables Hive-style column labels on an analyzer and returns it.
    analyzer.setUseHiveColLabels(true);
    return analyzer;
  }
104 
  // Adds a Udf: default.name(args) to the catalog.
  // TODO: we could consider having this be the sql to run instead but that requires
  // connecting to the BE.
  protected Function addTestFunction(String name,
      ArrayList<ScalarType> args, boolean varArgs) {
    return addTestFunction("default", name, args, varArgs);
  }

  // Single-argument convenience overload; wraps arg in a one-element list.
  protected Function addTestFunction(String name,
      ScalarType arg, boolean varArgs) {
    return addTestFunction("default", name, Lists.newArrayList(arg), varArgs);
  }
117 
118  protected Function addTestFunction(String db, String fnName,
119  ArrayList<ScalarType> args, boolean varArgs) {
120  ArrayList<Type> argTypes = Lists.newArrayList();
121  argTypes.addAll(args);
122  Function fn = new ScalarFunction(
123  new FunctionName(db, fnName), argTypes, Type.INT, null, null, null, null);
124  fn.setHasVarArgs(varArgs);
125  catalog_.addFunction(fn);
126  return fn;
127  }
128 
129  protected void addTestUda(String name, Type retType, Type... argTypes) {
130  FunctionName fnName = new FunctionName("default", name);
131  catalog_.addFunction(
132  new AggregateFunction(fnName, Lists.newArrayList(argTypes), retType, false));
133  }
134 
140  protected Db addTestDb(String dbName) {
141  Db db = catalog_.getDb(dbName);
142  Preconditions.checkState(db == null, "Test db must not already exist.");
143  db = new Db(dbName, catalog_);
144  catalog_.addDb(db);
145  testDbs_.add(db);
146  return db;
147  }
148 
149  protected void clearTestDbs() {
150  for (Db testDb: testDbs_) {
151  catalog_.removeDb(testDb.getName());
152  }
153  }
154 
161  protected Table addTestTable(String createTableSql) {
162  CreateTableStmt createTableStmt = (CreateTableStmt) AnalyzesOk(createTableSql);
163  // Currently does not support partitioned tables.
164  Preconditions.checkState(createTableStmt.getPartitionColumnDefs().isEmpty());
165  Db db = catalog_.getDb(createTableStmt.getDb());
166  Preconditions.checkNotNull(db, "Test tables must be created in an existing db.");
167  HdfsTable dummyTable = new HdfsTable(null, null, db, createTableStmt.getTbl(),
168  createTableStmt.getOwner());
169  List<ColumnDef> columnDefs = createTableStmt.getColumnDefs();
170  for (int i = 0; i < columnDefs.size(); ++i) {
171  ColumnDef colDef = columnDefs.get(i);
172  dummyTable.addColumn(new Column(colDef.getColName(), colDef.getType(), i));
173  }
174  db.addTable(dummyTable);
175  testTables_.add(dummyTable);
176  return dummyTable;
177  }
178 
179  protected void clearTestTables() {
180  for (Table testTable: testTables_) {
181  testTable.getDb().removeTable(testTable.getName());
182  }
183  }
184 
  // Per-test cleanup: removes test tables, then test databases.
  @After
  public void tearDown() {
    clearTestTables();
    clearTestDbs();
  }
190 
  // Converts the select list, where clause, grouping/aggregate exprs, and having
  // predicate of 'node' to thrift, logging each, and verifies binary exprs via
  // checkBinaryExprs().
  protected void checkSelectToThrift(SelectStmt node) {
    // convert select list exprs and where clause to thrift
    List<Expr> selectListExprs = node.getResultExprs();
    List<TExpr> thriftExprs = Expr.treesToThrift(selectListExprs);
    LOG.info("select list:\n");
    for (TExpr expr: thriftExprs) {
      LOG.info(expr.toString() + "\n");
    }
    for (Expr expr: selectListExprs) {
      checkBinaryExprs(expr);
    }
    if (node.getWhereClause() != null) {
      TExpr thriftWhere = node.getWhereClause().treeToThrift();
      LOG.info("WHERE pred: " + thriftWhere.toString() + "\n");
      // NOTE(review): one line was dropped here by the extraction (presumably a
      // checkBinaryExprs() call on the where clause) -- confirm against upstream.
    }
    AggregateInfo aggInfo = node.getAggInfo();
    if (aggInfo != null) {
      if (aggInfo.getGroupingExprs() != null) {
        LOG.info("grouping exprs:\n");
        for (Expr expr: aggInfo.getGroupingExprs()) {
          LOG.info(expr.treeToThrift().toString() + "\n");
          checkBinaryExprs(expr);
        }
      }
      LOG.info("aggregate exprs:\n");
      for (Expr expr: aggInfo.getAggregateExprs()) {
        LOG.info(expr.treeToThrift().toString() + "\n");
        checkBinaryExprs(expr);
      }
      if (node.getHavingPred() != null) {
        TExpr thriftHaving = node.getHavingPred().treeToThrift();
        LOG.info("HAVING pred: " + thriftHaving.toString() + "\n");
        // NOTE(review): a line was dropped here by the extraction as well.
      }
    }
  }
231 
235  public ParseNode ParsesOk(String stmt) {
236  SqlScanner input = new SqlScanner(new StringReader(stmt));
237  SqlParser parser = new SqlParser(input);
238  ParseNode node = null;
239  try {
240  node = (ParseNode) parser.parse().value;
241  } catch (Exception e) {
242  e.printStackTrace();
243  fail("\nParser error:\n" + parser.getErrorMsg(stmt));
244  }
245  assertNotNull(node);
246  return node;
247  }
248 
  // Analyzes stmt in the default db, asserting success.
  public ParseNode AnalyzesOk(String stmt) {
    return AnalyzesOk(stmt, createAnalyzer(Catalog.DEFAULT_DB), null);
  }

  // Analyzes stmt in the default db, asserting success and that some analysis
  // warning starts with expectedWarning.
  public ParseNode AnalyzesOk(String stmt, String expectedWarning) {
    return AnalyzesOk(stmt, createAnalyzer(Catalog.DEFAULT_DB), expectedWarning);
  }
263 
  // Analyzes stmt with the given analyzer, asserting success. If expectedWarning is
  // non-null, also asserts that some warning starts with it. Saves the analyzer in
  // analyzer_ so tests can inspect it afterwards. Returns the analyzed statement.
  public ParseNode AnalyzesOk(String stmt, Analyzer analyzer, String expectedWarning) {
    try {
      analyzer_ = analyzer;
      // NOTE(review): parts of the AnalysisContext constructor call below were
      // dropped by the extraction -- restore them from the original file.
      AnalysisContext analysisCtx = new AnalysisContext(catalog_,
          System.getProperty("user.name")),
      analysisCtx.analyze(stmt, analyzer);
      AnalysisContext.AnalysisResult analysisResult = analysisCtx.getAnalysisResult();
      if (expectedWarning != null) {
        List<String> actualWarnings = analysisResult.getAnalyzer().getWarnings();
        boolean matchedWarning = false;
        for (String actualWarning: actualWarnings) {
          if (actualWarning.startsWith(expectedWarning)) {
            matchedWarning = true;
            break;
          }
        }
        if (!matchedWarning) {
          fail(String.format("Did not produce expected warning.\n" +
              "Expected warning:\n%s.\nActual warnings:\n%s",
              expectedWarning, Joiner.on("\n").join(actualWarnings)));
        }
      }
      Preconditions.checkNotNull(analysisResult.getStmt());
      return analysisResult.getStmt();
    } catch (Exception e) {
      e.printStackTrace();
      fail("Error:\n" + e.toString());
    }
    return null;
  }
300 
  // Asserts that stmt fails analysis.
  // NOTE(review): this forwards a null expected message, but the three-argument
  // overload checkNotNull()s its message, so this path would trip that check --
  // confirm intended usage against upstream.
  public void AnalysisError(String stmt) {
    AnalysisError(stmt, null);
  }

  // Analyzes stmt with the given analyzer, asserting success (no warning check).
  public ParseNode AnalyzesOk(String stmt, Analyzer analyzer) {
    return AnalyzesOk(stmt, analyzer, null);
  }

  // Asserts that analyzing stmt in the default db fails with an error message
  // starting with expectedErrorString.
  public void AnalysisError(String stmt, String expectedErrorString) {
    AnalysisError(stmt, createAnalyzer(Catalog.DEFAULT_DB), expectedErrorString);
  }
322 
  // Asserts that analyzing stmt with the given analyzer fails, and that the error
  // message starts with expectedErrorString (which must be non-null).
  public void AnalysisError(String stmt, Analyzer analyzer, String expectedErrorString) {
    Preconditions.checkNotNull(expectedErrorString, "No expected error message given.");
    LOG.info("processing " + stmt);
    try {
      // NOTE(review): parts of the AnalysisContext constructor call below were
      // dropped by the extraction -- restore them from the original file.
      AnalysisContext analysisCtx = new AnalysisContext(catalog_,
          System.getProperty("user.name")),
      analysisCtx.analyze(stmt, analyzer);
      AnalysisContext.AnalysisResult analysisResult = analysisCtx.getAnalysisResult();
      Preconditions.checkNotNull(analysisResult.getStmt());
    } catch (Exception e) {
      String errorString = e.getMessage();
      Assert.assertTrue(
          "got error:\n" + errorString + "\nexpected:\n" + expectedErrorString,
          errorString.startsWith(expectedErrorString));
      return;
    }
    fail("Stmt didn't result in analysis error: " + stmt);
  }
347 
360  protected void TblsAnalyzeOk(String query, TableName tbl) {
361  Preconditions.checkState(tbl.isFullyQualified());
362  Preconditions.checkState(query.contains("$TBL"));
363  String uqQuery = query.replace("$TBL", tbl.getTbl());
364  AnalyzesOk(uqQuery, createAnalyzer(tbl.getDb()));
365  String fqQuery = query.replace("$TBL", tbl.toString());
366  AnalyzesOk(fqQuery);
367  }
368 
373  protected void TblsAnalysisError(String query, TableName tbl,
374  String expectedError) {
375  Preconditions.checkState(tbl.isFullyQualified());
376  Preconditions.checkState(query.contains("$TBL"));
377  String uqQuery = query.replace("$TBL", tbl.getTbl());
378  AnalysisError(uqQuery, createAnalyzer(tbl.getDb()), expectedError);
379  String fqQuery = query.replace("$TBL", tbl.toString());
380  AnalysisError(fqQuery, expectedError);
381  }
382 
386  private void checkBinaryExprs(Expr expr) {
387  if (expr instanceof BinaryPredicate
388  || (expr instanceof ArithmeticExpr
389  && ((ArithmeticExpr) expr).getOp() != ArithmeticExpr.Operator.BITNOT)) {
390  Assert.assertEquals(expr.getChildren().size(), 2);
391  // The types must be equal or one of them is NULL_TYPE.
392  Assert.assertTrue(expr.getChild(0).getType() == expr.getChild(1).getType()
393  || expr.getChild(0).getType().isNull() || expr.getChild(1).getType().isNull());
394  }
395  for (Expr child: expr.getChildren()) {
396  checkBinaryExprs(child);
397  }
398  }
399 
  // Verifies that queries over LZO-compressed text tables analyze successfully.
  @Test
  public void TestCompressedText() throws AnalysisException {
    AnalyzesOk("SELECT count(*) FROM functional_text_lzo.tinyinttable");
    // TODO: Disabling the text/{gzip,bzip,snap} analysis test until the corresponding
    // databases are loaded.
    // AnalyzesOk("SELECT count(*) FROM functional_text_gzip.tinyinttable");
    // AnalyzesOk("SELECT count(*) FROM functional_text_snap.tinyinttable");
    // AnalyzesOk("SELECT count(*) FROM functional_text_bzip.tinyinttable");
  }
409 
  // Driver for the tuple memory-layout checks below.
  @Test
  public void TestMemLayout() throws AnalysisException {
    testSelectStar();
    testNonNullable();
    // NOTE(review): two lines were dropped here by the extraction -- presumably
    // calls to testMixedNullable() and the non-materialized-slots helper defined
    // below, which are otherwise unreferenced. Confirm against upstream.
  }
417 
  // Checks the computed memory layout (slot sizes, offsets, null-indicator
  // byte/bit) for a select * over functional.alltypes with all slots materialized.
  private void testSelectStar() throws AnalysisException {
    AnalyzesOk("select * from functional.AllTypes");
    DescriptorTable descTbl = analyzer_.getDescTbl();
    TupleDescriptor tupleD = descTbl.getTupleDesc(new TupleId(0));
    for (SlotDescriptor slotD: tupleD.getSlots()) {
      slotD.setIsMaterialized(true);
    }
    descTbl.computeMemLayout();
    Assert.assertEquals(97.0f, tupleD.getAvgSerializedSize());
    // Args: column alias, byte size, byte offset, null-indicator byte, bit.
    checkLayoutParams("functional.alltypes.bool_col", 1, 2, 0, 0);
    checkLayoutParams("functional.alltypes.tinyint_col", 1, 3, 0, 1);
    checkLayoutParams("functional.alltypes.smallint_col", 2, 4, 0, 2);
    checkLayoutParams("functional.alltypes.id", 4, 8, 0, 3);
    checkLayoutParams("functional.alltypes.int_col", 4, 12, 0, 4);
    checkLayoutParams("functional.alltypes.float_col", 4, 16, 0, 5);
    checkLayoutParams("functional.alltypes.year", 4, 20, 0, 6);
    checkLayoutParams("functional.alltypes.month", 4, 24, 0, 7);
    checkLayoutParams("functional.alltypes.bigint_col", 8, 32, 1, 0);
    checkLayoutParams("functional.alltypes.double_col", 8, 40, 1, 1);
    int strSlotSize = PrimitiveType.STRING.getSlotSize();
    checkLayoutParams("functional.alltypes.date_string_col", strSlotSize, 48, 1, 2);
    checkLayoutParams("functional.alltypes.string_col",
        strSlotSize, 48 + strSlotSize, 1, 3);
  }
442 
  // Layout check for a tuple of two non-nullable aggregate slots.
  private void testNonNullable() throws AnalysisException {
    // both slots are non-nullable bigints. The layout should look like:
    // (byte range : data)
    // 0 - 7: count(int_col)
    // 8 - 15: count(*)
    AnalyzesOk("select count(int_col), count(*) from functional.AllTypes");
    DescriptorTable descTbl = analyzer_.getDescTbl();
    TupleDescriptor aggDesc = descTbl.getTupleDesc(new TupleId(1));
    for (SlotDescriptor slotD: aggDesc.getSlots()) {
      slotD.setIsMaterialized(true);
    }
    descTbl.computeMemLayout();
    Assert.assertEquals(16.0f, aggDesc.getAvgSerializedSize());
    Assert.assertEquals(16, aggDesc.getByteSize());
    // Null-indicator bit of -1 marks a non-nullable slot.
    checkLayoutParams(aggDesc.getSlots().get(0), 8, 0, 0, -1);
    checkLayoutParams(aggDesc.getSlots().get(1), 8, 8, 0, -1);
  }
460 
  // Layout check for a tuple mixing one nullable and one non-nullable slot.
  private void testMixedNullable() throws AnalysisException {
    // one slot is nullable, one is not. The layout should look like:
    // (byte range : data)
    // 0 : 1 nullable-byte (only 1 bit used)
    // 1 - 7: padded bytes
    // 8 - 15: sum(int_col)
    // 16 - 23: count(*)
    AnalyzesOk("select sum(int_col), count(*) from functional.AllTypes");
    DescriptorTable descTbl = analyzer_.getDescTbl();
    TupleDescriptor aggDesc = descTbl.getTupleDesc(new TupleId(1));
    for (SlotDescriptor slotD: aggDesc.getSlots()) {
      slotD.setIsMaterialized(true);
    }
    descTbl.computeMemLayout();
    Assert.assertEquals(16.0f, aggDesc.getAvgSerializedSize());
    Assert.assertEquals(24, aggDesc.getByteSize());
    checkLayoutParams(aggDesc.getSlots().get(0), 8, 8, 0, 0);
    checkLayoutParams(aggDesc.getSlots().get(1), 8, 16, 0, -1);
  }
480 
  // NOTE(review): the method signature was dropped by the extraction (presumably
  // "private void testNonMaterializedSlots() throws AnalysisException {"). The
  // surviving body checks the mem layout when some slots are non-materialized:
  // such slots get byte size 0 and offset -1 and consume no layout space.
    AnalyzesOk("select * from functional.alltypes");
    DescriptorTable descTbl = analyzer_.getDescTbl();
    TupleDescriptor tupleD = descTbl.getTupleDesc(new TupleId(0));
    ArrayList<SlotDescriptor> slots = tupleD.getSlots();
    for (SlotDescriptor slotD: slots) {
      slotD.setIsMaterialized(true);
    }
    // Mark slots 0 (id), 7 (double_col), 9 (string_col) as non-materialized.
    slots.get(0).setIsMaterialized(false);
    slots.get(7).setIsMaterialized(false);
    slots.get(9).setIsMaterialized(false);

    descTbl.computeMemLayout();
    Assert.assertEquals(68.0f, tupleD.getAvgSerializedSize());
    // Check non-materialized slots.
    checkLayoutParams("functional.alltypes.id", 0, -1, 0, 0);
    checkLayoutParams("functional.alltypes.double_col", 0, -1, 0, 0);
    checkLayoutParams("functional.alltypes.string_col", 0, -1, 0, 0);
    // Check materialized slots.
    checkLayoutParams("functional.alltypes.bool_col", 1, 2, 0, 0);
    checkLayoutParams("functional.alltypes.tinyint_col", 1, 3, 0, 1);
    checkLayoutParams("functional.alltypes.smallint_col", 2, 4, 0, 2);
    checkLayoutParams("functional.alltypes.int_col", 4, 8, 0, 3);
    checkLayoutParams("functional.alltypes.float_col", 4, 12, 0, 4);
    checkLayoutParams("functional.alltypes.year", 4, 16, 0, 5);
    checkLayoutParams("functional.alltypes.month", 4, 20, 0, 6);
    checkLayoutParams("functional.alltypes.bigint_col", 8, 24, 0, 7);
    int strSlotSize = PrimitiveType.STRING.getSlotSize();
    checkLayoutParams("functional.alltypes.date_string_col", strSlotSize, 32, 1, 0);
  }
515 
  // Asserts that slot descriptor d has the given byte size, byte offset, and
  // null-indicator byte/bit.
  private void checkLayoutParams(SlotDescriptor d, int byteSize, int byteOffset,
      int nullIndicatorByte, int nullIndicatorBit) {
    Assert.assertEquals(byteSize, d.getByteSize());
    Assert.assertEquals(byteOffset, d.getByteOffset());
    Assert.assertEquals(nullIndicatorByte, d.getNullIndicatorByte());
    Assert.assertEquals(nullIndicatorBit, d.getNullIndicatorBit());
  }
523 
  // Convenience overload: looks up the slot descriptor for colAlias in the current
  // analyzer and checks its layout.
  private void checkLayoutParams(String colAlias, int byteSize, int byteOffset,
      int nullIndicatorByte, int nullIndicatorBit) {
    SlotDescriptor d = analyzer_.getSlotDescriptor(colAlias);
    checkLayoutParams(d, byteSize, byteOffset, nullIndicatorByte, nullIndicatorBit);
  }
529 
  // Analyzes query and asserts that the first result expr returns the given type.
  // Requires query to parse to a SelectStmt.
  protected void checkExprType(String query, Type type) {
    SelectStmt select = (SelectStmt) AnalyzesOk(query);
    Assert.assertEquals(select.getResultExprs().get(0).getType(), type);
  }
536 
548  @Test
549  public void TestUnsupportedTypes() {
550  // Select supported types from a table with mixed supported/unsupported types.
551  AnalyzesOk("select int_col, str_col, bigint_col from functional.unsupported_types");
552 
553  // Select supported types from a table with mixed supported/unsupported types.
554  AnalyzesOk("select int_col, str_col, bigint_col from functional.unsupported_types");
555 
556  // Unsupported type binary.
557  AnalysisError("select bin_col from functional.unsupported_types",
558  "Unsupported type 'BINARY' in 'bin_col'.");
559  // Mixed supported/unsupported types.
560  AnalysisError("select int_col, str_col, bin_col " +
561  "from functional.unsupported_types",
562  "Unsupported type 'BINARY' in 'bin_col'.");
563  // Unsupported partition-column type.
564  AnalysisError("select * from functional.unsupported_partition_types",
565  "Failed to load metadata for table: 'functional.unsupported_partition_types'");
566 
567  // Try with hbase
568  AnalyzesOk("describe functional_hbase.allcomplextypes");
569  }
570 
  // A binary-encoded HBase table should analyze successfully.
  @Test
  public void TestBinaryHBaseTable() {
    AnalyzesOk("select * from functional_hbase.alltypessmallbinary");
  }

  // A table whose serde cannot be loaded should fail with a metadata-load error.
  @Test
  public void TestUnsupportedSerde() {
    AnalysisError("select * from functional.bad_serde",
        "Failed to load metadata for table: 'functional.bad_serde'");
  }
581 
  // Tests analysis of INVALIDATE METADATA and REFRESH statements, including their
  // behavior on unknown tables/databases.
  @Test
  public void TestResetMetadata() {
    AnalyzesOk("invalidate metadata");
    AnalyzesOk("invalidate metadata functional.alltypessmall");
    AnalyzesOk("invalidate metadata functional.alltypes_view");
    AnalyzesOk("invalidate metadata functional.bad_serde");
    AnalyzesOk("refresh functional.alltypessmall");
    AnalyzesOk("refresh functional.alltypes_view");
    AnalyzesOk("refresh functional.bad_serde");

    // invalidate metadata <table name> checks the Hive Metastore for table existence
    // and should not throw an AnalysisError if the table or db does not exist.
    AnalyzesOk("invalidate metadata functional.unknown_table");
    AnalyzesOk("invalidate metadata unknown_db.unknown_table");

    AnalysisError("refresh functional.unknown_table",
        "Table does not exist: functional.unknown_table");
    AnalysisError("refresh unknown_db.unknown_table",
        "Database does not exist: unknown_db");
  }
602 
  // EXPLAIN should surface the same analysis errors (and successes) as the
  // underlying statement.
  @Test
  public void TestExplain() {
    // Analysis error from explain insert: too many partitioning columns.
    AnalysisError("explain insert into table functional.alltypessmall " +
        "partition (year=2009, month=4, year=10)" +
        "select id, bool_col, tinyint_col, smallint_col, int_col, bigint_col, " +
        "float_col, double_col, date_string_col, string_col, timestamp_col " +
        "from functional.alltypes",
        "Duplicate column 'year' in partition clause");

    // Analysis error from explain query
    AnalysisError("explain " +
        "select id from (select id+2 from functional_hbase.alltypessmall) a",
        "Could not resolve column/field reference: 'id'");

    // Positive test for explain query
    AnalyzesOk("explain select * from functional.AllTypes");

    // Positive test for explain insert
    AnalyzesOk("explain insert into table functional.alltypessmall " +
        "partition (year=2009, month=4)" +
        "select id, bool_col, tinyint_col, smallint_col, int_col, int_col, " +
        "float_col, float_col, date_string_col, string_col, timestamp_col " +
        "from functional.alltypes");
  }
628 
  // Tests analysis of LIMIT and OFFSET clauses: constant-expression evaluation,
  // integer-type and non-negativity requirements, and OFFSET's dependence on
  // ORDER BY.
  @Test
  public void TestLimitAndOffset() {
    // Arithmetic expressions that result in a positive, integral value are OK
    AnalyzesOk("select * from functional.AllTypes limit 10 * 10 + 10 - 10 % 10");
    AnalyzesOk("select * from functional.AllTypes limit 1 ^ 0 | 3 & 3");
    // Test offset, requires order by and limit
    AnalyzesOk("select * from functional.AllTypes order by id limit 10 offset 1+2*3%4");
    // Test offset within an inline view and with-clause view
    AnalyzesOk("select t5.id from (select id from functional.AllTypes order by id " +
        "limit 10 offset 2) t5");
    AnalyzesOk("with t5 as (select id from functional.AllTypes order by id limit 10 " +
        "offset 2) select * from t5");

    // Casting to int is fine
    AnalyzesOk("select id, bool_col from functional.AllTypes limit CAST(10.0 AS INT)");
    AnalyzesOk("select id, bool_col from functional.AllTypes limit " +
        "CAST(NOT FALSE AS INT)");
    AnalyzesOk("select * from functional.AllTypes order by id limit 10 " +
        "offset CAST(1.0 AS INT)");

    // Analysis error from negative values
    AnalysisError("select * from functional.AllTypes limit 10 - 20",
        "LIMIT must be a non-negative integer: 10 - 20 = -10");
    AnalysisError("select * from functional.AllTypes order by id limit 10 " +
        "offset 10 - 20",
        "OFFSET must be a non-negative integer: 10 - 20 = -10");

    // Analysis error from non-integral values
    AnalysisError("select * from functional.AllTypes limit 10.0",
        "LIMIT expression must be an integer type but is 'DECIMAL(3,1)': 10.0");
    AnalysisError("select * from functional.AllTypes limit NOT FALSE",
        "LIMIT expression must be an integer type but is 'BOOLEAN': NOT FALSE");
    AnalysisError("select * from functional.AllTypes limit CAST(\"asdf\" AS INT)",
        "LIMIT expression evaluates to NULL: CAST('asdf' AS INT)");
    AnalysisError("select * from functional.AllTypes order by id limit 10 " +
        "OFFSET 10.0",
        "OFFSET expression must be an integer type but is 'DECIMAL(3,1)': 10.0");
    AnalysisError("select * from functional.AllTypes order by id limit 10 " +
        "offset CAST('asdf' AS INT)",
        "OFFSET expression evaluates to NULL: CAST('asdf' AS INT)");

    // Analysis error from non-constant expressions
    AnalysisError("select id, bool_col from functional.AllTypes limit id < 10",
        "LIMIT expression must be a constant expression: id < 10");
    AnalysisError("select id, bool_col from functional.AllTypes order by id limit 10 " +
        "offset id < 10",
        "OFFSET expression must be a constant expression: id < 10");

    // Offset is only valid with an order by
    AnalysisError("SELECT a FROM test LIMIT 10 OFFSET 5",
        "OFFSET requires an ORDER BY clause: LIMIT 10 OFFSET 5");
    AnalysisError("SELECT x.id FROM (SELECT id FROM alltypesagg LIMIT 5 OFFSET 5) x " +
        "ORDER BY x.id LIMIT 100 OFFSET 4",
        "OFFSET requires an ORDER BY clause: LIMIT 5 OFFSET 5");
    AnalysisError("SELECT a FROM test OFFSET 5",
        "OFFSET requires an ORDER BY clause: OFFSET 5");
    AnalyzesOk("SELECT id FROM functional.Alltypes ORDER BY bool_col OFFSET 5");
  }
687 
  // Tests SHOW CREATE TABLE analysis: OK on a table, errors on views and
  // nonexistent tables.
  @Test
  // NOTE(review): the method signature line was dropped by the extraction
  // (presumably "public void TestShowCreateTable() {") -- restore from upstream.
    AnalyzesOk("show create table functional.AllTypes");
    AnalysisError("show create table functional.alltypes_view",
        "SHOW CREATE TABLE not supported on VIEW: functional.alltypes_view");
    AnalysisError("show create table functional.not_a_table",
        "Table does not exist: functional.not_a_table");
    AnalysisError("show create table doesnt_exist",
        "Table does not exist: default.doesnt_exist");
  }
698 
699  private Function createFunction(boolean hasVarArgs, Type... args) {
700  return new Function(new FunctionName("test"), args, Type.INVALID, hasVarArgs);
701  }
702 
703  @Test
704  // Test matching function signatures.
705  public void TestFunctionMatching() {
706  Function[] fns = new Function[14];
707  // test()
708  fns[0] = createFunction(false);
709 
710  // test(int)
711  fns[1] = createFunction(false, Type.INT);
712 
713  // test(int...)
714  fns[2] = createFunction(true, Type.INT);
715 
716  // test(tinyint)
717  fns[3] = createFunction(false, Type.TINYINT);
718 
719  // test(tinyint...)
720  fns[4] = createFunction(true, Type.TINYINT);
721 
722  // test(double)
723  fns[5] = createFunction(false, Type.DOUBLE);
724 
725  // test(double...)
726  fns[6] = createFunction(true, Type.DOUBLE);
727 
728  // test(double, double)
729  fns[7] = createFunction(false, Type.DOUBLE, Type.DOUBLE);
730 
731  // test(double, double...)
732  fns[8] = createFunction(true, Type.DOUBLE, Type.DOUBLE);
733 
734  // test(smallint, tinyint)
735  fns[9] = createFunction(false, Type.SMALLINT, Type.TINYINT);
736 
737  // test(int, double, double, double)
738  fns[10] = createFunction(false, Type.INT, Type.DOUBLE, Type.DOUBLE, Type.DOUBLE);
739 
740  // test(int, string, int...)
741  fns[11] = createFunction(true, Type.INT, Type.STRING, Type.INT);
742 
743  // test(tinying, string, tinyint, int, tinyint)
744  fns[12] = createFunction(false, Type.TINYINT, Type.STRING, Type.TINYINT, Type.INT,
745  Type.TINYINT);
746 
747  // test(tinying, string, bigint, int, tinyint)
748  fns[13] = createFunction(false, Type.TINYINT, Type.STRING, Type.BIGINT, Type.INT,
749  Type.TINYINT);
750 
751  Assert.assertFalse(fns[1].compare(fns[0], Function.CompareMode.IS_SUPERTYPE_OF));
752  Assert.assertTrue(fns[1].compare(fns[2], Function.CompareMode.IS_SUPERTYPE_OF));
753  Assert.assertTrue(fns[1].compare(fns[3], Function.CompareMode.IS_SUPERTYPE_OF));
754  Assert.assertTrue(fns[1].compare(fns[4], Function.CompareMode.IS_SUPERTYPE_OF));
755  Assert.assertFalse(fns[1].compare(fns[5], Function.CompareMode.IS_SUPERTYPE_OF));
756  Assert.assertFalse(fns[1].compare(fns[6], Function.CompareMode.IS_SUPERTYPE_OF));
757  Assert.assertFalse(fns[1].compare(fns[7], Function.CompareMode.IS_SUPERTYPE_OF));
758  Assert.assertFalse(fns[1].compare(fns[8], Function.CompareMode.IS_SUPERTYPE_OF));
759 
760  Assert.assertTrue(fns[1].compare(fns[2], Function.CompareMode.IS_INDISTINGUISHABLE));
761  Assert.assertTrue(fns[3].compare(fns[4], Function.CompareMode.IS_INDISTINGUISHABLE));
762  Assert.assertTrue(fns[5].compare(fns[6], Function.CompareMode.IS_INDISTINGUISHABLE));
763  Assert.assertFalse(fns[5].compare(fns[7], Function.CompareMode.IS_INDISTINGUISHABLE));
764  Assert.assertFalse(fns[5].compare(fns[8], Function.CompareMode.IS_INDISTINGUISHABLE));
765  Assert.assertTrue(fns[6].compare(fns[7], Function.CompareMode.IS_INDISTINGUISHABLE));
766  Assert.assertTrue(fns[6].compare(fns[8], Function.CompareMode.IS_INDISTINGUISHABLE));
767  Assert.assertTrue(fns[7].compare(fns[8], Function.CompareMode.IS_INDISTINGUISHABLE));
768  Assert.assertFalse(fns[1].compare(fns[3], Function.CompareMode.IS_INDISTINGUISHABLE));
769  Assert.assertFalse(fns[1].compare(fns[4], Function.CompareMode.IS_INDISTINGUISHABLE));
770 
771  Assert.assertFalse(fns[9].compare(fns[4], Function.CompareMode.IS_SUPERTYPE_OF));
772  Assert.assertTrue(fns[2].compare(fns[9], Function.CompareMode.IS_SUPERTYPE_OF));
773 
774  Assert.assertTrue(fns[8].compare(fns[10], Function.CompareMode.IS_SUPERTYPE_OF));
775  Assert.assertFalse(fns[10].compare(fns[8], Function.CompareMode.IS_SUPERTYPE_OF));
776 
777  Assert.assertTrue(fns[11].compare(fns[12], Function.CompareMode.IS_SUPERTYPE_OF));
778  Assert.assertFalse(fns[11].compare(fns[13], Function.CompareMode.IS_SUPERTYPE_OF));
779 
780  for (int i = 0; i < fns.length; ++i) {
781  for (int j = 0; j < fns.length; ++j) {
782  if (i == j) {
783  Assert.assertTrue(
784  fns[i].compare(fns[i], Function.CompareMode.IS_IDENTICAL));
785  Assert.assertTrue(
786  fns[i].compare(fns[i], Function.CompareMode.IS_INDISTINGUISHABLE));
787  Assert.assertTrue(
788  fns[i].compare(fns[i], Function.CompareMode.IS_SUPERTYPE_OF));
789  } else {
790  Assert.assertFalse(fns[i].compare(fns[j], Function.CompareMode.IS_IDENTICAL));
791  if (fns[i].compare(fns[j], Function.CompareMode.IS_INDISTINGUISHABLE)) {
792  // If it's a indistinguishable, at least one of them must be a super type
793  // of the other
794  Assert.assertTrue(
795  fns[i].compare(fns[j], Function.CompareMode.IS_SUPERTYPE_OF) ||
796  fns[j].compare(fns[i], Function.CompareMode.IS_SUPERTYPE_OF));
797  } else if (fns[i].compare(fns[j], Function.CompareMode.IS_INDISTINGUISHABLE)) {
798  // This is reflexive
799  Assert.assertTrue(
800  fns[j].compare(fns[i], Function.CompareMode.IS_INDISTINGUISHABLE));
801  }
802  }
803  }
804  }
805  }
806 }
ParseNode AnalyzesOk(String stmt, Analyzer analyzer)
void TblsAnalyzeOk(String query, TableName tbl)
static final ScalarType NULL
Definition: Type.java:45
static final ScalarType BIGINT
Definition: Type.java:50
static final ScalarType STRING
Definition: Type.java:53
static Map< ScalarType, String > typeToLiteralValue_
void AnalysisError(String stmt, String expectedErrorString)
Analyzer createAnalyzer(String defaultDb)
void checkExprType(String query, Type type)
static final ScalarType BOOLEAN
Definition: Type.java:46
int TupleId
Definition: global-types.h:23
ArrayList< FunctionCallExpr > getAggregateExprs()
void TblsAnalysisError(String query, TableName tbl, String expectedError)
ParseNode AnalyzesOk(String stmt, String expectedWarning)
static final ScalarType SMALLINT
Definition: Type.java:48
static final ScalarType FLOAT
Definition: Type.java:51
void addTestUda(String name, Type retType, Type...argTypes)
void AnalysisError(String stmt, Analyzer analyzer, String expectedErrorString)
ParseNode AnalyzesOk(String stmt, Analyzer analyzer, String expectedWarning)
PrimitiveType
Definition: types.h:27
Analyzer createAnalyzer(TQueryOptions queryOptions)
static final ScalarType DOUBLE
Definition: Type.java:52
Table addTestTable(String createTableSql)
static final ScalarType TINYINT
Definition: Type.java:47
Function createFunction(boolean hasVarArgs, Type...args)
Function addTestFunction(String name, ScalarType arg, boolean varArgs)
static final ScalarType INT
Definition: Type.java:49
uint64_t Test(T *ht, const ProbeTuple *input, uint64_t num_tuples)
Function addTestFunction(String name, ArrayList< ScalarType > args, boolean varArgs)
void checkLayoutParams(SlotDescriptor d, int byteSize, int byteOffset, int nullIndicatorByte, int nullIndicatorBit)
void checkLayoutParams(String colAlias, int byteSize, int byteOffset, int nullIndicatorByte, int nullIndicatorBit)
string name
Definition: cpu-info.cc:50
Function addTestFunction(String db, String fnName, ArrayList< ScalarType > args, boolean varArgs)
static final ScalarType INVALID
Definition: Type.java:44
static final String DEFAULT_DB
Definition: Catalog.java:58
static final ScalarType TIMESTAMP
Definition: Type.java:55