15 package com.cloudera.impala.analysis;
17 import static org.junit.Assert.assertTrue;
19 import java.io.IOException;
20 import java.util.ArrayList;
21 import java.util.UUID;
23 import junit.framework.Assert;
25 import org.apache.hadoop.fs.FSDataOutputStream;
26 import org.apache.hadoop.fs.FileSystem;
27 import org.apache.hadoop.fs.Path;
28 import org.apache.hadoop.fs.permission.FsAction;
29 import org.apache.hadoop.fs.permission.FsPermission;
40 import com.google.common.base.Preconditions;
41 import com.google.common.collect.Lists;
47 String[] addDrop = {
"add if not exists",
"drop if exists"};
48 for (String kw: addDrop) {
50 AnalyzesOk(
"alter table functional.alltypes " + kw +
51 " partition(year=2050, month=10)");
52 AnalyzesOk(
"alter table functional.alltypes " + kw +
53 " partition(month=10, year=2050)");
54 AnalyzesOk(
"alter table functional.insert_string_partitioned " + kw +
55 " partition(s2='1234')");
58 AnalysisError(
"alter table functional.alltypesnopart " + kw +
" partition (i=1)",
59 "Table is not partitioned: functional.alltypesnopart");
60 AnalysisError(
"alter table functional_hbase.alltypesagg " + kw +
61 " partition (i=1)",
"Table is not partitioned: functional_hbase.alltypesagg");
65 " partition(year=2050, year=2051)",
"Duplicate partition key name: year");
68 " partition(year=2050, int_col=1)",
69 "Column 'int_col' is not a partition column in table: functional.alltypes");
72 AnalyzesOk(
"alter table functional.alltypes " + kw +
73 " partition(year=NULL, month=1)");
74 AnalyzesOk(
"alter table functional.alltypes " + kw +
75 " partition(year=NULL, month=NULL)");
76 AnalyzesOk(
"alter table functional.alltypes " + kw +
77 " partition(year=ascii(null), month=ascii(NULL))");
79 AnalyzesOk(
"alter table functional.insert_string_partitioned " + kw +
82 AnalyzesOk(
"alter table functional.alltypes " + kw +
83 " partition(year=-1, month=cast((10+5*4) as INT))");
87 " partition(year=2050, month=int_col)",
88 "Non-constant expressions are not supported as static partition-key values " +
89 "in 'month=int_col'.");
91 " partition(year=cast(int_col as int), month=12)",
92 "Non-constant expressions are not supported as static partition-key values " +
93 "in 'year=CAST(int_col AS INT)'.");
97 " partition(year=2050, blah=1)",
98 "Partition column 'blah' not found in table: functional.alltypes");
102 "alter table functional.insert_string_partitioned " + kw +
103 " partition(s2=1234)",
104 "Value of partition spec (column=s2) has incompatible type: 'SMALLINT'. " +
105 "Expected type: 'STRING'.");
109 "alter table functional.alltypes " + kw +
110 " partition(year=100000000000, month=10)",
111 "Partition key value may result in loss of precision.\nWould need to cast" +
112 " '100000000000' to 'INT' for partition column: year");
116 AnalysisError(
"alter table db_does_not_exist.alltypes " + kw +
117 " partition (i=1)",
"Database does not exist: db_does_not_exist");
118 AnalysisError(
"alter table functional.table_does_not_exist " + kw +
119 " partition (i=1)",
"Table does not exist: functional.table_does_not_exist");
123 " partition(year=2050, month=10)",
124 "ALTER TABLE not allowed on a view: functional.alltypes_view");
125 AnalysisError(
"alter table functional.alltypes_datasource " + kw +
126 " partition(year=2050, month=10)",
127 "ALTER TABLE not allowed on a table produced by a data source: " +
128 "functional.alltypes_datasource");
132 AnalyzesOk(
"alter table functional.alltypes add " +
133 "partition(year=2050, month=10)");
135 "partition(year=2010, month=10)",
136 "Partition spec already exists: (year=2010, month=10).");
137 AnalyzesOk(
"alter table functional.alltypes add if not exists " +
138 " partition(year=2010, month=10)");
139 AnalyzesOk(
"alter table functional.alltypes add if not exists " +
140 " partition(year=2010, month=10) location " +
141 "'/test-warehouse/alltypes/year=2010/month=10'");
144 AnalyzesOk(
"alter table functional.alltypes drop " +
145 "partition(year=2010, month=10)");
147 "partition(year=2050, month=10)",
148 "Partition spec does not exist: (year=2050, month=10).");
149 AnalyzesOk(
"alter table functional.alltypes drop if exists " +
150 "partition(year=2050, month=10)");
153 AnalyzesOk(
"alter table functional.alltypes add " +
154 "partition(year=2050, month=10) cached in 'testPool'");
155 AnalyzesOk(
"alter table functional.alltypes add " +
156 "partition(year=2050, month=10) cached in 'testPool' with replication = 10");
157 AnalyzesOk(
"alter table functional.alltypes add " +
158 "partition(year=2050, month=10) uncached");
160 "partition(year=2050, month=10) cached in 'badPool'",
161 "The specified cache pool does not exist: badPool");
164 AnalyzesOk(
"alter table functional.alltypes add " +
165 " partition(year=2050, month=10) location " +
166 "'/test-warehouse/alltypes/year=2010/month=10'");
167 AnalyzesOk(
"alter table functional.alltypes add " +
168 " partition(year=2050, month=10) location " +
169 "'hdfs://localhost:20500/test-warehouse/alltypes/year=2010/month=10'");
170 AnalyzesOk(
"alter table functional.alltypes add " +
171 " partition(year=2050, month=10) location " +
172 "'s3n://bucket/test-warehouse/alltypes/year=2010/month=10'");
173 AnalyzesOk(
"alter table functional.alltypes add " +
174 " partition(year=2050, month=10) location " +
175 "'file:///test-warehouse/alltypes/year=2010/month=10'");
179 " partition(year=2050, month=10) location " +
180 "'foofs://bar/test-warehouse/alltypes/year=2010/month=10'",
181 "No FileSystem for scheme: foofs");
183 " partition(year=2050, month=10) location ' '",
184 "URI path cannot be empty.");
189 AnalyzesOk(
"alter table functional.alltypes add columns (new_col int)");
190 AnalyzesOk(
"alter table functional.alltypes add columns (c1 string comment 'hi')");
191 AnalyzesOk(
"alter table functional.alltypes add columns (c struct<f1:int>)");
193 "alter table functional.alltypes replace columns (c1 int comment 'c', c2 int)");
194 AnalyzesOk(
"alter table functional.alltypes replace columns (c array<string>)");
197 AnalysisError(
"alter table functional.alltypes add columns (int_col int)",
198 "Column already exists: int_col");
200 AnalysisError(
"alter table functional.alltypes add columns (year int)",
201 "Column name conflicts with existing partition column: year");
203 AnalysisError(
"alter table functional.alltypes add columns (`???` int)",
204 "Invalid column/field name: ???");
205 AnalysisError(
"alter table functional.alltypes replace columns (`???` int)",
206 "Invalid column/field name: ???");
209 AnalyzesOk(
"alter table functional.alltypes replace columns (int_col int)");
211 AnalysisError(
"alter table functional.alltypes replace columns (Year int)",
212 "Column name conflicts with existing partition column: year");
215 AnalysisError(
"alter table functional.alltypes add columns (c1 int, c1 int)",
216 "Duplicate column name: c1");
217 AnalysisError(
"alter table functional.alltypes replace columns (c1 int, C1 int)",
218 "Duplicate column name: c1");
221 AnalysisError(
"alter table db_does_not_exist.alltypes add columns (i int)",
222 "Database does not exist: db_does_not_exist");
223 AnalysisError(
"alter table functional.table_does_not_exist add columns (i int)",
224 "Table does not exist: functional.table_does_not_exist");
228 "add columns (c1 string comment 'hi')",
229 "ALTER TABLE not allowed on a view: functional.alltypes_view");
231 AnalysisError(
"alter table functional.alltypes_datasource " +
232 "add columns (c1 string comment 'hi')",
233 "ALTER TABLE not allowed on a table produced by a data source: " +
234 "functional.alltypes_datasource");
237 AnalysisError(
"alter table functional_hbase.alltypes add columns (i int)",
238 "ALTER TABLE ADD|REPLACE COLUMNS not currently supported on HBase tables.");
243 AnalyzesOk(
"alter table functional.alltypes drop column int_col");
245 AnalysisError(
"alter table functional.alltypes drop column no_col",
246 "Column 'no_col' does not exist in table: functional.alltypes");
248 AnalysisError(
"alter table functional.alltypes drop column year",
249 "Cannot drop partition column: year");
252 AnalysisError(
"alter table functional_seq_snap.bad_seq_snap drop column field",
253 "Cannot drop column 'field' from functional_seq_snap.bad_seq_snap. " +
254 "Tables must contain at least 1 column.");
257 AnalysisError(
"alter table db_does_not_exist.alltypes drop column col1",
258 "Database does not exist: db_does_not_exist");
259 AnalysisError(
"alter table functional.table_does_not_exist drop column col1",
260 "Table does not exist: functional.table_does_not_exist");
263 AnalysisError(
"alter table functional.alltypes_view drop column int_col",
264 "ALTER TABLE not allowed on a view: functional.alltypes_view");
266 AnalysisError(
"alter table functional.alltypes_datasource drop column int_col",
267 "ALTER TABLE not allowed on a table produced by a data source: " +
268 "functional.alltypes_datasource");
271 AnalysisError(
"alter table functional_hbase.alltypes drop column int_col",
272 "ALTER TABLE DROP COLUMN not currently supported on HBase tables.");
278 AnalyzesOk(
"alter table functional.alltypes change column int_col int_col2 int");
280 AnalyzesOk(
"alter table functional.alltypes change column int_col c2 string");
282 "alter table functional.alltypes change column int_col c2 map<int, string>");
284 AnalyzesOk(
"alter table functional.alltypes change column int_col int_col tinyint");
286 AnalyzesOk(
"alter table functional.alltypes change int_col int_col int comment 'c'");
288 AnalysisError(
"alter table functional.alltypes change column no_col c1 int",
289 "Column 'no_col' does not exist in table: functional.alltypes");
291 AnalysisError(
"alter table functional.alltypes change column year year int",
292 "Cannot modify partition column: year");
295 "alter table functional.alltypes change column int_col Tinyint_col int",
296 "Column already exists: Tinyint_col");
299 AnalysisError(
"alter table functional.alltypes change column int_col `???` int",
300 "Invalid column/field name: ???");
303 AnalysisError(
"alter table db_does_not_exist.alltypes change c1 c2 int",
304 "Database does not exist: db_does_not_exist");
305 AnalysisError(
"alter table functional.table_does_not_exist change c1 c2 double",
306 "Table does not exist: functional.table_does_not_exist");
310 "change column int_col int_col2 int",
311 "ALTER TABLE not allowed on a view: functional.alltypes_view");
313 AnalysisError(
"alter table functional.alltypes_datasource " +
314 "change column int_col int_col2 int",
315 "ALTER TABLE not allowed on a table produced by a data source: " +
316 "functional.alltypes_datasource");
319 AnalysisError(
"alter table functional_hbase.alltypes CHANGE COLUMN int_col i int",
320 "ALTER TABLE CHANGE COLUMN not currently supported on HBase tables.");
325 AnalyzesOk(
"alter table functional.alltypes set fileformat sequencefile");
326 AnalyzesOk(
"alter table functional.alltypes set location '/a/b'");
327 AnalyzesOk(
"alter table functional.alltypes set tblproperties('a'='1')");
328 AnalyzesOk(
"alter table functional.alltypes set serdeproperties('a'='2')");
329 AnalyzesOk(
"alter table functional.alltypes PARTITION (Year=2010, month=11) " +
330 "set location '/a/b'");
331 AnalyzesOk(
"alter table functional.alltypes PARTITION (month=11, year=2010) " +
332 "set fileformat parquetfile");
333 AnalyzesOk(
"alter table functional.stringpartitionkey PARTITION " +
334 "(string_col='partition1') set fileformat parquet");
335 AnalyzesOk(
"alter table functional.stringpartitionkey PARTITION " +
336 "(string_col='PaRtiTion1') set location '/a/b/c'");
337 AnalyzesOk(
"alter table functional.alltypes PARTITION (year=2010, month=11) " +
338 "set tblproperties('a'='1')");
339 AnalyzesOk(
"alter table functional.alltypes PARTITION (year=2010, month=11) " +
340 "set serdeproperties ('a'='2')");
342 AnalyzesOk(
"alter table functional.alltypes PARTITION " +
343 "(year=cast(100*20+10 as INT), month=cast(2+9 as INT)) " +
344 "set fileformat sequencefile");
345 AnalyzesOk(
"alter table functional.alltypes PARTITION " +
346 "(year=cast(100*20+10 as INT), month=cast(2+9 as INT)) " +
347 "set location '/a/b'");
350 "(Year=2050, month=int_col) set fileformat sequencefile",
351 "Non-constant expressions are not supported as static partition-key " +
352 "values in 'month=int_col'.");
354 "(Year=2050, month=int_col) set location '/a/b'",
355 "Non-constant expressions are not supported as static partition-key " +
356 "values in 'month=int_col'.");
359 AnalysisError(
"alter table functional.alltypes PARTITION (year=2014, month=11) " +
360 "set location '/a/b'",
361 "Partition spec does not exist: (year=2014, month=11)");
362 AnalysisError(
"alter table functional.alltypes PARTITION (year=2014, month=11) " +
363 "set tblproperties('a'='1')",
364 "Partition spec does not exist: (year=2014, month=11)");
365 AnalysisError(
"alter table functional.alltypes PARTITION (year=2010) " +
366 "set tblproperties('a'='1')",
367 "Items in partition spec must exactly match the partition columns " +
368 "in the table definition: functional.alltypes (1 vs 2)");
369 AnalysisError(
"alter table functional.alltypes PARTITION (year=2010, year=2010) " +
370 "set location '/a/b'",
371 "Duplicate partition key name: year");
372 AnalysisError(
"alter table functional.alltypes PARTITION (month=11, year=2014) " +
373 "set fileformat sequencefile",
374 "Partition spec does not exist: (month=11, year=2014)");
375 AnalysisError(
"alter table functional.alltypesnopart PARTITION (month=1) " +
376 "set fileformat sequencefile",
377 "Table is not partitioned: functional.alltypesnopart");
378 AnalysisError(
"alter table functional.alltypesnopart PARTITION (month=1) " +
379 "set location '/a/b/c'",
380 "Table is not partitioned: functional.alltypesnopart");
381 AnalysisError(
"alter table functional.stringpartitionkey PARTITION " +
382 "(string_col='partition2') set location '/a/b'",
383 "Partition spec does not exist: (string_col='partition2')");
384 AnalysisError(
"alter table functional.stringpartitionkey PARTITION " +
385 "(string_col='partition2') set fileformat sequencefile",
386 "Partition spec does not exist: (string_col='partition2')");
388 "(year=cast(10*20+10 as INT), month=cast(5*3 as INT)) " +
389 "set location '/a/b'",
390 "Partition spec does not exist: " +
391 "(year=CAST(10 * 20 + 10 AS INT), month=CAST(5 * 3 AS INT))");
393 "(year=cast(10*20+10 as INT), month=cast(5*3 as INT)) " +
394 "set fileformat sequencefile",
395 "Partition spec does not exist: " +
396 "(year=CAST(10 * 20 + 10 AS INT), month=CAST(5 * 3 AS INT))");
399 AnalysisError(
"alter table db_does_not_exist.alltypes set fileformat sequencefile",
400 "Database does not exist: db_does_not_exist");
401 AnalysisError(
"alter table functional.table_does_not_exist set fileformat rcfile",
402 "Table does not exist: functional.table_does_not_exist");
403 AnalysisError(
"alter table db_does_not_exist.alltypes set location '/a/b'",
404 "Database does not exist: db_does_not_exist");
405 AnalysisError(
"alter table functional.table_does_not_exist set location '/a/b'",
406 "Table does not exist: functional.table_does_not_exist");
407 AnalysisError(
"alter table functional.no_tbl partition(i=1) set location '/a/b'",
408 "Table does not exist: functional.no_tbl");
409 AnalysisError(
"alter table no_db.alltypes partition(i=1) set fileformat textfile",
410 "Database does not exist: no_db");
413 AnalyzesOk(
"alter table functional.alltypes set location " +
414 "'hdfs://localhost:20500/test-warehouse/a/b'");
415 AnalyzesOk(
"alter table functional.alltypes set location " +
416 "'s3n://bucket/test-warehouse/a/b'");
417 AnalyzesOk(
"alter table functional.alltypes set location " +
418 "'file:///test-warehouse/a/b'");
421 AnalysisError(
"alter table functional.alltypes set location 'test/warehouse'",
422 "URI path must be absolute: test/warehouse");
423 AnalysisError(
"alter table functional.alltypes set location 'blah:///warehouse/'",
424 "No FileSystem for scheme: blah");
425 AnalysisError(
"alter table functional.alltypes set location ''",
426 "URI path cannot be empty.");
427 AnalysisError(
"alter table functional.alltypes set location ' '",
428 "URI path cannot be empty.");
431 AnalysisError(
"alter table functional.alltypes_view set fileformat sequencefile",
432 "ALTER TABLE not allowed on a view: functional.alltypes_view");
434 AnalysisError(
"alter table functional.alltypes_datasource set fileformat parquet",
435 "ALTER TABLE not allowed on a table produced by a data source: " +
436 "functional.alltypes_datasource");
439 AnalysisError(
"alter table functional_hbase.alltypes set tblproperties('a'='b')",
440 "ALTER TABLE SET not currently supported on HBase tables.");
446 AnalyzesOk(
"alter table functional.alltypesnopart set cached in 'testPool'");
447 AnalyzesOk(
"alter table functional.alltypes set cached in 'testPool'");
448 AnalyzesOk(
"alter table functional.alltypes partition(year=2010, month=12) " +
449 "set cached in 'testPool'");
452 AnalyzesOk(
"alter table functional.alltypes set cached in 'testPool' " +
453 "with replication = 10");
454 AnalyzesOk(
"alter table functional.alltypes partition(year=2010, month=12) " +
455 "set cached in 'testPool' with replication = 4");
456 AnalysisError(
"alter table functional.alltypes set cached in 'testPool' " +
457 "with replication = 0",
458 "Cache replication factor must be between 0 and Short.MAX_VALUE");
459 AnalysisError(
"alter table functional.alltypes set cached in 'testPool' " +
460 "with replication = 90000",
461 "Cache replication factor must be between 0 and Short.MAX_VALUE");
464 AnalysisError(
"alter table functional_hbase.alltypesnopart set cached in 'testPool'",
465 "ALTER TABLE SET not currently supported on HBase tables.");
466 AnalysisError(
"alter table functional.view_view set cached in 'testPool'",
467 "ALTER TABLE not allowed on a view: functional.view_view");
469 AnalysisError(
"alter table functional.alltypes set cached in 'badPool'",
470 "The specified cache pool does not exist: badPool");
471 AnalysisError(
"alter table functional.alltypes partition(year=2010, month=12) " +
472 "set cached in 'badPool'",
"The specified cache pool does not exist: badPool");
475 AnalyzesOk(
"alter table functional.alltypes set uncached");
476 AnalyzesOk(
"alter table functional.alltypes partition(year=2010, month=12) " +
480 AnalyzesOk(
"alter table functional.alltypestiny set cached in 'testPool'");
481 AnalyzesOk(
"alter table functional.alltypestiny partition(year=2009, month=1) " +
482 "set cached in 'testPool'");
485 AnalysisError(
"alter table functional.alltypestiny set location '/tmp/tiny'",
486 "Target table is cached, please uncache before changing the location using: " +
487 "ALTER TABLE functional.alltypestiny SET UNCACHED");
488 AnalysisError(
"alter table functional.alltypestiny partition (year=2009,month=1) " +
489 "set location '/test-warehouse/new_location'",
490 "Target partition is cached, please uncache before changing the location " +
491 "using: ALTER TABLE functional.alltypestiny PARTITION (year=2009, month=1) " +
495 AnalysisError(
"alter table baddb.alltypestiny set cached in 'testPool'",
496 "Database does not exist: baddb");
497 AnalysisError(
"alter table functional.badtbl set cached in 'testPool'",
498 "Table does not exist: functional.badtbl");
499 AnalysisError(
"alter table functional.alltypestiny partition(year=9999, month=1) " +
500 "set cached in 'testPool'",
501 "Partition spec does not exist: (year=9999, month=1).");
506 AnalyzesOk(
"alter table functional.alltypes rename to new_alltypes");
507 AnalyzesOk(
"alter table functional.alltypes rename to functional.new_alltypes");
508 AnalysisError(
"alter table functional.alltypes rename to functional.alltypes",
509 "Table already exists: functional.alltypes");
510 AnalysisError(
"alter table functional.alltypes rename to functional.alltypesagg",
511 "Table already exists: functional.alltypesagg");
513 AnalysisError(
"alter table functional.table_does_not_exist rename to new_table",
514 "Table does not exist: functional.table_does_not_exist");
515 AnalysisError(
"alter table db_does_not_exist.alltypes rename to new_table",
516 "Database does not exist: db_does_not_exist");
519 AnalysisError(
"alter table functional.alltypes rename to `???`.new_table",
520 "Invalid database name: ???");
521 AnalysisError(
"alter table functional.alltypes rename to functional.`%^&`",
522 "Invalid table/view name: %^&");
525 "alter table functional.alltypes rename to db_does_not_exist.new_table",
526 "Database does not exist: db_does_not_exist");
529 AnalysisError(
"alter table functional.alltypes_view rename to new_alltypes",
530 "ALTER TABLE not allowed on a view: functional.alltypes_view");
533 AnalyzesOk(
"alter table functional_hbase.alltypes rename to new_alltypes");
536 AnalyzesOk(
"alter table functional.alltypes_datasource rename to new_datasrc_tbl");
542 AnalyzesOk(
"alter view functional.alltypes_view as " +
543 "select * from functional.alltypesagg");
545 AnalyzesOk(
"alter view functional.alltypes_view as " +
546 "select * from functional.alltypes_view");
549 AnalyzesOk(
"alter view functional.alltypes_view as " +
550 "select trim('abc'), 17 * 7");
554 "select * from functional.alltypesagg",
555 "ALTER VIEW not allowed on a table: functional.alltypes");
556 AnalysisError(
"alter view functional_hbase.alltypesagg as " +
557 "select * from functional.alltypesagg",
558 "ALTER VIEW not allowed on a table: functional_hbase.alltypesagg");
561 "select * from functional.alltypesagg",
562 "Database does not exist: baddb");
565 "select * from functional.alltypesagg",
566 "Table does not exist: functional.badview");
569 "select * from baddb.alltypesagg",
570 "Could not resolve table reference: 'baddb.alltypesagg'");
573 "select * from functional.badtable",
574 "Could not resolve table reference: 'functional.badtable'");
577 "select * from functional.alltypessmall a inner join " +
578 "functional.alltypessmall b on a.id = b.id",
579 "Duplicate column name: id");
581 AnalysisError(
"alter view functional.alltypes_view as select 'abc' as `???`",
582 "Invalid column/field name: ???");
584 AnalyzesOk(
"alter view functional.alltypes_view as " +
585 "select * from functional.alltypestiny where id in " +
586 "(select id from functional.alltypessmall where int_col = 1)");
591 AnalyzesOk(
"alter view functional.alltypes_view rename to new_view");
592 AnalyzesOk(
"alter view functional.alltypes_view rename to functional.new_view");
593 AnalysisError(
"alter view functional.alltypes_view rename to functional.alltypes",
594 "Table already exists: functional.alltypes");
595 AnalysisError(
"alter view functional.alltypes_view rename to functional.alltypesagg",
596 "Table already exists: functional.alltypesagg");
598 AnalysisError(
"alter view functional.view_does_not_exist rename to new_view",
599 "Table does not exist: functional.view_does_not_exist");
600 AnalysisError(
"alter view db_does_not_exist.alltypes_view rename to new_view",
601 "Database does not exist: db_does_not_exist");
604 "rename to db_does_not_exist.new_view",
605 "Database does not exist: db_does_not_exist");
608 AnalysisError(
"alter view functional.alltypes_view rename to `???`.new_view",
609 "Invalid database name: ???");
610 AnalysisError(
"alter view functional.alltypes_view rename to functional.`%^&`",
611 "Invalid table/view name: %^&");
614 AnalysisError(
"alter view functional.alltypes rename to new_alltypes",
615 "ALTER VIEW not allowed on a table: functional.alltypes");
621 ComputeStatsStmt parsedStmt = (ComputeStatsStmt)parseNode;
638 "Table does not exist: default.tbl_does_not_exist");
641 "COMPUTE STATS not supported for view functional.alltypes_view");
643 AnalyzesOk(
"compute stats functional_avro_snap.alltypes");
648 AnalyzesOk(
"compute stats functional_avro_snap.alltypes_type_mismatch");
651 AnalyzesOk(
"compute stats functional_avro_snap.alltypes_missing_coldef");
654 AnalyzesOk(
"compute stats functional_avro_snap.alltypes_extra_coldef");
656 AnalyzesOk(
"compute stats functional_avro_snap.alltypes_no_coldef");
658 AnalysisError(
"compute stats functional_avro_snap.schema_resolution_test",
659 "Cannot COMPUTE STATS on Avro table 'schema_resolution_test' because its " +
660 "column definitions do not match those in the Avro schema.\nDefinition of " +
661 "column 'col1' of type 'string' does not match the Avro-schema column " +
662 "'boolean1' of type 'BOOLEAN' at position '0'.\nPlease re-create the table " +
663 "with column definitions, e.g., using the result of 'SHOW CREATE TABLE'");
670 "compute incremental stats functional.alltypes partition(year=2010, month=10)");
673 "compute incremental stats functional.alltypes partition(year=9999, month=10)",
674 "Partition spec does not exist: (year=9999, month=10)");
676 "compute incremental stats functional.alltypes partition(year=2010)",
677 "Items in partition spec must exactly match the partition columns in the table " +
678 "definition: functional.alltypes (1 vs 2)");
680 "compute incremental stats functional.alltypes partition(year=2010, month)",
685 "partition(year=2010, month=1, day=NULL)");
687 AnalysisError(
"compute incremental stats functional_hbase.alltypes " +
688 "partition(year=2010, month=1)",
"COMPUTE INCREMENTAL ... PARTITION not " +
689 "supported for non-HDFS table functional_hbase.alltypes");
691 AnalysisError(
"compute incremental stats functional.view_view",
692 "COMPUTE STATS not supported for view functional.view_view");
699 "drop incremental stats functional.alltypes partition(year=2010, month=10)");
701 "drop incremental stats functional.alltypes partition(year=9999, month=10)",
702 "Partition spec does not exist: (year=9999, month=10)");
712 "Table does not exist: default.tbl_does_not_exist");
715 "Database does not exist: no_db");
717 AnalysisError(
"drop stats functional.alltypes partition(year=2010, month=10)",
719 AnalysisError(
"drop stats functional.alltypes partition(year, month)",
727 AnalyzesOk(
"drop view functional.alltypes_view");
732 "Database does not exist: db_does_not_exist");
734 "Database does not exist: db_does_not_exist");
736 "Database does not exist: db_does_not_exist");
739 "Database does not exist: ???");
741 "Table does not exist: functional.%^&");
743 "Table does not exist: functional.@#$%");
748 "Table does not exist: functional.badtable");
750 "Table does not exist: functional.badview");
753 AnalyzesOk(
"drop database if exists db_does_not_exist");
756 AnalyzesOk(
"drop table if exists db_does_not_exist.alltypes");
757 AnalyzesOk(
"drop view if exists db_does_not_exist.alltypes");
760 AnalyzesOk(
"drop table if exists functional.notbl");
761 AnalyzesOk(
"drop view if exists functional.notbl");
765 "DROP TABLE not allowed on a view: functional.alltypes_view");
768 "DROP VIEW not allowed on a table: functional.alltypes");
773 final String DATA_SOURCE_NAME =
"TestDataSource1";
776 catalog_.addDataSource(DATA_SOURCE);
777 AnalyzesOk(
"CREATE DATA SOURCE IF NOT EXISTS " + DATA_SOURCE_NAME +
778 " LOCATION '/foo.jar' CLASS 'foo.Bar' API_VERSION 'V1'");
779 AnalyzesOk(
"CREATE DATA SOURCE IF NOT EXISTS " + DATA_SOURCE_NAME.toLowerCase() +
780 " LOCATION '/foo.jar' CLASS 'foo.Bar' API_VERSION 'V1'");
781 AnalyzesOk(
"CREATE DATA SOURCE IF NOT EXISTS " + DATA_SOURCE_NAME +
782 " LOCATION 'hdfs://localhost:20500/foo.jar' CLASS 'foo.Bar' API_VERSION 'V1'");
783 AnalyzesOk(
"CREATE DATA SOURCE foo LOCATION '/' CLASS '' API_VERSION 'v1'");
784 AnalyzesOk(
"CREATE DATA SOURCE foo LOCATION '/foo.jar' CLASS 'com.bar.Foo' " +
786 AnalyzesOk(
"CREATE DATA SOURCE foo LOCATION '/FOO.jar' CLASS 'COM.BAR.FOO' " +
788 AnalyzesOk(
"CREATE DATA SOURCE foo LOCATION \"/foo.jar\" CLASS \"com.bar.Foo\" " +
789 "API_VERSION \"V1\"");
790 AnalyzesOk(
"CREATE DATA SOURCE foo LOCATION '/x/foo@hi_^!#.jar' " +
791 "CLASS 'com.bar.Foo' API_VERSION 'V1'");
792 AnalyzesOk(
"CREATE DATA SOURCE foo LOCATION 'hdfs://localhost:20500/a/b/foo.jar' " +
793 "CLASS 'com.bar.Foo' API_VERSION 'V1'");
794 AnalyzesOk(
"CREATE DATA SOURCE foo LOCATION 's3n://bucket/a/b/foo.jar' " +
795 "CLASS 'com.bar.Foo' API_VERSION 'V1'");
797 AnalysisError(
"CREATE DATA SOURCE foo LOCATION 'blah://localhost:20500/foo.jar' " +
798 "CLASS 'com.bar.Foo' API_VERSION 'V1'",
799 "No FileSystem for scheme: blah");
800 AnalysisError(
"CREATE DATA SOURCE " + DATA_SOURCE_NAME +
" LOCATION '/foo.jar' " +
801 "CLASS 'foo.Bar' API_VERSION 'V1'",
802 "Data source already exists: " + DATA_SOURCE_NAME.toLowerCase());
803 AnalysisError(
"CREATE DATA SOURCE foo LOCATION '/foo.jar' " +
804 "CLASS 'foo.Bar' API_VERSION 'V2'",
"Invalid API version: 'V2'");
809 AnalyzesOk(
"create database some_new_database");
810 AnalysisError(
"create database functional",
"Database already exists: functional");
811 AnalyzesOk(
"create database if not exists functional");
813 AnalysisError(
"create database `%^&`",
"Invalid database name: %^&");
816 AnalyzesOk(
"create database new_db location " +
817 "'/test-warehouse/new_db'");
818 AnalyzesOk(
"create database new_db location " +
819 "'hdfs://localhost:50200/test-warehouse/new_db'");
820 AnalyzesOk(
"create database new_db location " +
821 "'s3n://bucket/test-warehouse/new_db'");
824 "'blah://bucket/test-warehouse/new_db'",
825 "No FileSystem for scheme: blah");
831 AnalyzesOk(
"create table if not exists newtbl_DNE like parquet "
832 +
"'/test-warehouse/schemas/alltypestiny.parquet'");
833 AnalyzesOk(
"create table newtbl_DNE like parquet "
834 +
"'/test-warehouse/schemas/zipcode_incomes.parquet'");
835 AnalyzesOk(
"create table default.newtbl_DNE like parquet "
836 +
"'/test-warehouse/schemas/zipcode_incomes.parquet'");
837 AnalyzesOk(
"create table newtbl_DNE like parquet "
838 +
"'/test-warehouse/schemas/zipcode_incomes.parquet' STORED AS PARQUET");
839 AnalyzesOk(
"create external table newtbl_DNE like parquet "
840 +
"'/test-warehouse/schemas/zipcode_incomes.parquet' STORED AS PARQUET");
841 AnalyzesOk(
// NOTE(review): this region is a corrupted extraction of an Impala analysis test
// (likely AnalyzeDDLTest#TestCreateTableLikeFile). The original file's line
// numbers (842, 843, ...) have been fused into the code as bare tokens, the
// enclosing @Test method signature is missing, and some statement openers were
// dropped (see below). Bytes are left untouched; comments only. TODO: restore
// from upstream source before compiling.
//
// Pattern used throughout: AnalyzesOk(stmt) asserts the statement analyzes
// cleanly; AnalysisError(stmt, msg) asserts analysis fails with that message.
//
// --- CREATE TABLE LIKE PARQUET: schema inferred from an existing parquet file.
// (First statement's opener is above this view; this is its continuation.)
"create table if not exists functional.zipcode_incomes like parquet "
842 +
"'/test-warehouse/schemas/zipcode_incomes.parquet'");
843 AnalyzesOk(
"create table if not exists newtbl_DNE like parquet "
844 +
"'/test-warehouse/schemas/zipcode_incomes.parquet'");
845 AnalyzesOk(
"create table if not exists newtbl_DNE like parquet "
846 +
"'/test-warehouse/schemas/decimal.parquet'");
// --- Negative cases: existing table / missing database.
849 AnalysisError(
"create table functional.zipcode_incomes like parquet "
850 +
"'/test-warehouse/schemas/zipcode_incomes.parquet'",
851 "Table already exists: functional.zipcode_incomes");
852 AnalysisError(
"create table database_DNE.newtbl_DNE like parquet "
853 +
"'/test-warehouse/schemas/zipcode_incomes.parquet'",
854 "Database does not exist: database_DNE");
// --- Negative cases: bad paths (directory, relative, nonexistent, local fs).
857 AnalysisError(
"create table if not exists functional.zipcode_incomes like parquet "
858 +
"'/test-warehouse'",
859 "Cannot infer schema, path is not a file: hdfs://localhost:20500/test-warehouse");
860 AnalysisError(
"create table newtbl_DNE like parquet 'foobar'",
861 "URI path must be absolute: foobar");
862 AnalysisError(
"create table newtbl_DNE like parquet '/not/a/file/path'",
863 "Cannot infer schema, path is not a file: "
864 +
"hdfs://localhost:20500/not/a/file/path");
865 AnalysisError(
"create table if not exists functional.zipcode_incomes like parquet "
866 +
"'file:///tmp/foobar'",
867 "Cannot infer schema, path is not a file: file:/tmp/foobar");
// --- Negative case: file exists but is not a parquet file.
870 AnalysisError(
"create table database_DNE.newtbl_DNE like parquet "
871 +
"'/test-warehouse/zipcode_incomes_rc/000000_0'",
872 "File is not a parquet file: "
873 +
"hdfs://localhost:20500/test-warehouse/zipcode_incomes_rc/000000_0");
// --- Negative cases: parquet types Impala cannot map to a column type.
876 AnalysisError(
"create table if not exists functional.zipcode_incomes like parquet "
877 +
"'/test-warehouse/schemas/malformed_decimal_tiny.parquet'",
878 "Unsupported parquet type FIXED_LEN_BYTE_ARRAY for field c1");
// NOTE(review): the AnalysisError(...) openers for the next four statements
// (original lines ~879-891) were lost in extraction; only the path argument
// and expected-message fragments remain. Recover from upstream.
882 +
"'/test-warehouse/schemas/unsupported.parquet'",
883 "Unsupported parquet type for field strct");
885 +
"'/test-warehouse/schemas/map.parquet'",
886 "Unsupported parquet type for field mp");
888 +
"'/test-warehouse/schemas/array.parquet'",
889 "Unsupported parquet type for field lst");
891 +
"'/test-warehouse/schemas/struct.parquet'",
892 "Unsupported parquet type for field strct");
// NOTE(review): corrupted-extraction fragment of the CREATE TABLE AS SELECT
// (CTAS) analysis tests; original-file line numbers (898-948) are fused into
// the code and the enclosing @Test signature is missing. Bytes untouched.
//
// --- Valid CTAS forms: constant select, explicit storage formats, joins.
898 AnalyzesOk(
"create table newtbl as select 1+2, 'abc'");
902 AnalyzesOk(
"create table newtbl stored as textfile " +
903 "as select * from functional.jointbl");
904 AnalyzesOk(
"create table newtbl stored as parquetfile " +
905 "as select * from functional.alltypes");
906 AnalyzesOk(
"create table newtbl stored as parquet " +
907 "as select * from functional.alltypes");
908 AnalyzesOk(
"create table newtbl as select int_col from functional.alltypes");
910 AnalyzesOk(
"create table functional.newtbl " +
911 "as select count(*) as CNT from functional.alltypes");
912 AnalyzesOk(
"create table functional.tbl as select a.* from functional.alltypes a " +
913 "join functional.alltypes b on (a.int_col=b.int_col) limit 1000");
// --- CTAS with HDFS caching clauses.
916 AnalyzesOk(
"create table functional.newtbl cached in 'testPool'" +
917 " as select count(*) as CNT from functional.alltypes");
918 AnalyzesOk(
"create table functional.newtbl uncached" +
919 " as select count(*) as CNT from functional.alltypes");
// --- Negative cases: existing table, missing db/table, duplicate columns.
922 AnalysisError(
"create table functional.alltypes as select 1",
923 "Table already exists: functional.alltypes");
924 AnalyzesOk(
"create table if not exists functional.alltypes as select 1");
927 AnalysisError(
"create table db_does_not_exist.new_table as select 1",
928 "Database does not exist: db_does_not_exist");
931 AnalysisError(
"create table newtbl as select * from tbl_does_not_exist",
932 "Could not resolve table reference: 'tbl_does_not_exist'");
933 AnalysisError(
"create table newtbl as select 1 as c1, 2 as c1",
934 "Duplicate column name: c1");
// --- CTAS only supports PARQUET and TEXTFILE sink formats.
937 AnalysisError(
"create table foo stored as sequencefile as select 1",
938 "CREATE TABLE AS SELECT does not support (SEQUENCEFILE) file format. " +
939 "Supported formats are: (PARQUET, TEXTFILE)");
940 AnalysisError(
"create table foo stored as RCFILE as select 1",
941 "CREATE TABLE AS SELECT does not support (RCFILE) file format. " +
942 "Supported formats are: (PARQUET, TEXTFILE)");
// --- CTAS over a WITH-clause query.
945 AnalyzesOk(
"create table test_with as with with_1 as (select 1 as int_col from " +
946 "functional.alltypes as t1 right join (select 1 as int_col from " +
947 "functional.alltypestiny as t1) as t2 on t2.int_col = t1.int_col) " +
948 "select * from with_1 limit 10");
// NOTE(review): corrupted-extraction fragment of the CREATE TABLE LIKE tests;
// original-file line numbers (953-994) are fused in and at least one opener
// (before original line 956) was dropped. Bytes untouched; comments only.
//
// --- Valid CREATE TABLE LIKE forms (tables and views as the source).
953 AnalyzesOk(
"create table if not exists functional.new_tbl like functional.alltypes");
954 AnalyzesOk(
"create table functional.like_view like functional.view_view");
// NOTE(review): the AnalyzesOk( opener for the next statement (original line
// ~955) is missing from the extraction.
956 "create table if not exists functional.alltypes like functional.alltypes");
957 AnalysisError(
"create table functional.alltypes like functional.alltypes",
958 "Table already exists: functional.alltypes");
959 AnalysisError(
"create table functional.new_table like functional.tbl_does_not_exist",
960 "Table does not exist: functional.tbl_does_not_exist");
961 AnalysisError(
"create table functional.new_table like db_does_not_exist.alltypes",
962 "Database does not exist: db_does_not_exist");
// --- Invalid identifiers in either the new or the source table name.
964 AnalysisError(
"create table `???`.new_table like functional.alltypes",
965 "Invalid database name: ???");
967 AnalysisError(
"create table functional.`^&*` like functional.alltypes",
968 "Invalid table/view name: ^&*");
970 AnalysisError(
"create table functional.foo like `???`.alltypes",
971 "Database does not exist: ???");
972 AnalysisError(
"create table functional.foo like functional.`%^&`",
973 "Table does not exist: functional.%^&");
// --- LOCATION clause with various URI schemes (hdfs, file, s3n).
975 AnalyzesOk(
"create table tbl like functional.alltypes location " +
976 "'/test-warehouse/new_table'");
977 AnalyzesOk(
"create table tbl like functional.alltypes location " +
978 "'hdfs://localhost:20500/test-warehouse/new_table'");
981 AnalyzesOk(
"create table tbl like functional.alltypes location " +
982 "'file:///test-warehouse/new_table'");
983 AnalyzesOk(
"create table tbl like functional.alltypes location " +
984 "'file://test-warehouse/new_table'");
985 AnalyzesOk(
"create table tbl like functional.alltypes location " +
986 "'file:/test-warehouse/new_table'");
987 AnalyzesOk(
"create table tbl like functional.alltypes location " +
988 "'s3n://bucket/test-warehouse/new_table'");
// --- Negative location cases: unknown scheme, blank path.
990 AnalysisError(
"create table tbl like functional.alltypes location " +
991 "'foofs://test-warehouse/new_table'",
992 "No FileSystem for scheme: foofs");
993 AnalysisError(
"create table functional.baz like functional.alltypes location ' '",
994 "URI path cannot be empty.");
// NOTE(review): corrupted-extraction fragment of basic CREATE TABLE tests;
// original-file line numbers (999-1025) are fused in, and several full
// statements (originals ~1001-1003, 1015-1022) survive only as their expected
// error-message arguments. Bytes untouched; comments only.
//
// --- Basic create table and row-format forms.
999 AnalyzesOk(
"create table functional.new_table (i int)");
1000 AnalyzesOk(
"create table if not exists functional.alltypes (i int)");
// NOTE(review): only the expected-message arguments of two AnalysisError calls
// remain here; their statement text was dropped by the extraction.
1002 "Table already exists: functional.alltypes");
1004 "Table already exists: functional.alltypes");
1005 AnalyzesOk(
"create table functional.new_table (i int) row format delimited fields " +
1006 "terminated by '|'");
// --- DECIMAL columns and partition columns; precision bound is 38.
1008 AnalyzesOk(
"create table new_table (i int) PARTITIONED BY (d decimal)");
1009 AnalyzesOk(
"create table new_table (i int) PARTITIONED BY (d decimal(3,1))");
1010 AnalyzesOk(
"create table new_table(d1 decimal, d2 decimal(10), d3 decimal(5, 2))");
1011 AnalysisError(
"create table new_table (i int) PARTITIONED BY (d decimal(40,1))",
1012 "Decimal precision must be <= 38: 40");
// --- VARCHAR / CHAR size bounds. The four bare message lines below lost
// their statements in extraction (originals ~1015-1022).
1014 AnalyzesOk(
"create table new_table(s1 varchar(1), s2 varchar(32672))");
1016 "Varchar size must be > 0: 0");
1018 "Varchar size must be <= 65355: 65356");
1020 "Char size must be > 0: 0");
1022 "Char size must be <= 255: 256");
1023 AnalyzesOk(
"create table new_table (i int) PARTITIONED BY (s varchar(3))");
1024 AnalyzesOk(
"create table functional.new_table (c char(250))");
1025 AnalyzesOk(
"create table new_table (i int) PARTITIONED BY (c char(3))");
// NOTE(review): corrupted-extraction fragment. Loops over the supported file
// formats, checking that a partitioned table can be created in each format and
// (originals ~1033-1036, opener dropped) that a zero-column table is rejected.
// The for-loop's closing brace is also missing from the extraction.
1028 String [] fileFormats =
1029 {
"TEXTFILE",
"SEQUENCEFILE",
"PARQUET",
"PARQUETFILE",
"RCFILE"};
1030 for (String format: fileFormats) {
1031 AnalyzesOk(String.format(
"create table new_table (i int) " +
1032 "partitioned by (d decimal) comment 'c' stored as %s", format));
// NOTE(review): the AnalysisError(String.format( opener for the next statement
// (originals ~1033-1034) was lost in extraction.
1035 "partitioned by (d decimal) comment 'c' stored as %s", format),
1036 "Table requires at least 1 column");
// NOTE(review): corrupted-extraction fragment covering ROW FORMAT DELIMITED
// terminator/escape validation, duplicate-column detection, unsupported
// partition-column types, caching clauses, and invalid identifiers.
// Original-file line numbers (1042-1106) are fused in; one AnalysisError
// opener (before original 1071) was dropped. Bytes untouched.
//
// --- Valid delimiter specs: escapes, octal, and decimal byte values.
1042 AnalyzesOk(
"create table functional.new_table (i int) row format delimited fields " +
1043 "terminated by '\\t' escaped by '\\\\' lines terminated by '\\n'");
1044 AnalyzesOk(
"create table functional.new_table (i int) row format delimited fields " +
1045 "terminated by '\\001' escaped by '\\002' lines terminated by '\\n'");
1046 AnalyzesOk(
"create table functional.new_table (i int) row format delimited fields " +
1047 "terminated by '-2' escaped by '-3' lines terminated by '\\n'");
1048 AnalyzesOk(
"create table functional.new_table (i int) row format delimited fields " +
1049 "terminated by '-128' escaped by '127' lines terminated by '40'");
// --- Delimiter values outside [-128:127] or longer than one char are rejected.
1051 AnalysisError(
"create table functional.new_table (i int) row format delimited " +
1052 "fields terminated by '-2' escaped by '128' lines terminated by '\\n'",
1053 "ESCAPED BY values and LINE/FIELD terminators must be specified as a single " +
1054 "character or as a decimal value in the range [-128:127]: 128");
1055 AnalysisError(
"create table functional.new_table (i int) row format delimited " +
1056 "fields terminated by '-2' escaped by '127' lines terminated by '255'",
1057 "ESCAPED BY values and LINE/FIELD terminators must be specified as a single " +
1058 "character or as a decimal value in the range [-128:127]: 255");
1059 AnalysisError(
"create table functional.new_table (i int) row format delimited " +
1060 "fields terminated by '-129' escaped by '127' lines terminated by '\\n'",
1061 "ESCAPED BY values and LINE/FIELD terminators must be specified as a single " +
1062 "character or as a decimal value in the range [-128:127]: -129");
1063 AnalysisError(
"create table functional.new_table (i int) row format delimited " +
1064 "fields terminated by '||' escaped by '\\\\' lines terminated by '\\n'",
1065 "ESCAPED BY values and LINE/FIELD terminators must be specified as a single " +
1066 "character or as a decimal value in the range [-128:127]: ||");
// --- Missing database / duplicate column names (case-insensitive).
1068 AnalysisError(
"create table db_does_not_exist.new_table (i int)",
1069 "Database does not exist: db_does_not_exist");
// NOTE(review): the statement for this expected message (original ~1070) was
// dropped by the extraction.
1071 "Duplicate column name: I");
1072 AnalysisError(
"create table new_table (c1 double, col2 int, c1 double, c4 string)",
1073 "Duplicate column name: c1");
1074 AnalysisError(
"create table new_table (i int, s string) PARTITIONED BY (i int)",
1075 "Duplicate column name: i");
1076 AnalysisError(
"create table new_table (i int) PARTITIONED BY (C int, c2 int, c int)",
1077 "Duplicate column name: c");
// --- Types not allowed as partition columns.
1080 AnalysisError(
"create table new_table (i int) PARTITIONED BY (t timestamp)",
1081 "Type 'TIMESTAMP' is not supported as partition-column type in column: t");
1082 AnalysisError(
"create table new_table (i int) PARTITIONED BY (d date)",
1083 "Type 'DATE' is not supported as partition-column type in column: d");
1084 AnalysisError(
"create table new_table (i int) PARTITIONED BY (d datetime)",
1085 "Type 'DATETIME' is not supported as partition-column type in column: d");
// --- Caching clauses on partitioned tables.
1088 AnalyzesOk(
"create table cached_tbl(i int) partitioned by(j int) " +
1089 "cached in 'testPool'");
1090 AnalyzesOk(
"create table cached_tbl(i int) partitioned by(j int) uncached");
1091 AnalyzesOk(
"create table cached_tbl(i int) partitioned by(j int) " +
1092 "location '/test-warehouse/' cached in 'testPool'");
1093 AnalyzesOk(
"create table cached_tbl(i int) partitioned by(j int) " +
1094 "location '/test-warehouse/' uncached");
// --- Invalid database / table / column identifiers.
1097 AnalysisError(
"create table `???`.new_table (x int) PARTITIONED BY (y int)",
1098 "Invalid database name: ???");
1100 AnalysisError(
"create table functional.`^&*` (x int) PARTITIONED BY (y int)",
1101 "Invalid table/view name: ^&*");
1103 AnalysisError(
"create table new_table (`???` int) PARTITIONED BY (i int)",
1104 "Invalid column/field name: ???");
1105 AnalysisError(
"create table new_table (i int) PARTITIONED BY (`^&*` int)",
1106 "Invalid column/field name: ^&*");
// NOTE(review): corrupted-extraction fragment of LOCATION-clause validation for
// CREATE TABLE and ALTER TABLE SET LOCATION. Original-file line numbers
// (1109-1129) are fused in. Bytes untouched; comments only.
//
// --- Valid locations across URI schemes.
1109 AnalyzesOk(
"create table tbl (i int) location '/test-warehouse/new_table'");
1110 AnalyzesOk(
"create table tbl (i int) location " +
1111 "'hdfs://localhost:20500/test-warehouse/new_table'");
1112 AnalyzesOk(
"create table tbl (i int) location " +
1113 "'file:///test-warehouse/new_table'");
1114 AnalyzesOk(
"create table tbl (i int) location " +
1115 "'s3n://bucket/test-warehouse/new_table'");
1116 AnalyzesOk(
"ALTER TABLE functional_seq_snap.alltypes SET LOCATION " +
1117 "'file://test-warehouse/new_table'");
// --- Invalid locations: unknown filesystem scheme, blank path.
1120 AnalysisError(
"create table functional.foo (x int) location " +
1121 "'foofs://test-warehouse/new_table'",
1122 "No FileSystem for scheme: foofs");
1123 AnalysisError(
"create table functional.foo (x int) location " +
1124 "' '",
"URI path cannot be empty.");
1125 AnalysisError(
"ALTER TABLE functional_seq_snap.alltypes SET LOCATION " +
1126 "'foofs://test-warehouse/new_table'",
1127 "No FileSystem for scheme: foofs");
1128 AnalysisError(
"ALTER TABLE functional_seq_snap.alltypes SET LOCATION " +
1129 "' '",
"URI path cannot be empty.");
// NOTE(review): corrupted-extraction fragment of CREATE TABLE ... PRODUCED BY
// DATA SOURCE tests. A test data source is registered in the catalog, then
// statements referencing it are analyzed. Several lines are incomplete: the
// DataSource constructor call (original ~1133-1134) is cut off, some statements
// lack their trailing arguments, and a loop header around the per-type check
// (original ~1148) is missing. Bytes untouched; comments only.
1132 final String DATA_SOURCE_NAME =
"TestDataSource1";
1133 catalog_.addDataSource(
new DataSource(DATA_SOURCE_NAME,
"/foo.jar",
// --- Data source name matching is case-insensitive; init string may be empty.
1135 AnalyzesOk(
"CREATE TABLE DataSrcTable1 (x int) PRODUCED BY DATA SOURCE " +
1137 AnalyzesOk(
"CREATE TABLE DataSrcTable1 (x int) PRODUCED BY DATA SOURCE " +
1138 DATA_SOURCE_NAME.toLowerCase());
1139 AnalyzesOk(
"CREATE TABLE DataSrcTable1 (x int) PRODUCED BY DATA SOURCE " +
1140 DATA_SOURCE_NAME +
"(\"\")");
1141 AnalyzesOk(
"CREATE TABLE DataSrcTable1 (a tinyint, b smallint, c int, d bigint, " +
1142 "e float, f double, g boolean, h string) PRODUCED BY DATA SOURCE " +
1144 AnalysisError(
"CREATE TABLE DataSrcTable1 (x int) PRODUCED BY DATA SOURCE " +
1145 "not_a_data_src(\"\")",
"Data source does not exist");
// --- Per-type rejection check; presumably iterates over unsupported column
// types (loop header lost in extraction — TODO confirm against upstream).
1149 String typeSpec = type.name();
1154 AnalysisError(
"CREATE TABLE DataSrcTable1 (x " + typeSpec +
") PRODUCED " +
1155 "BY DATA SOURCE " + DATA_SOURCE_NAME,
1156 "Tables produced by an external data source do not support the column type: " +
// NOTE(review): corrupted-extraction fragment of the Avro table tests:
// avro.schema.url / avro.schema.literal handling, column-definition vs. Avro
// schema reconciliation, decimal logical types, and malformed schemas.
// Original-file line numbers (1163-1320) are fused in and several
// AnalyzesOk/AnalysisError openers were dropped. Bytes untouched.
1163 String alltypesSchemaLoc =
1164 "hdfs:///test-warehouse/avro_schemas/functional/alltypes.json";
// --- Valid: schema via SERDEPROPERTIES url, TBLPROPERTIES url, or literal.
// (Openers for the first two statements, originals ~1167-1168 and ~1174,
// were lost in extraction.)
1169 "create table foo_avro (id int, bool_col boolean, tinyint_col int, " +
1170 "smallint_col int, int_col int, bigint_col bigint, float_col float," +
1171 "double_col double, date_string_col string, string_col string, " +
1172 "timestamp_col timestamp) with serdeproperties ('avro.schema.url'='%s')" +
1173 "stored as avro", alltypesSchemaLoc));
1175 "create table foo_avro (id int, bool_col boolean, tinyint_col int, " +
1176 "smallint_col int, int_col int, bigint_col bigint, float_col float," +
1177 "double_col double, date_string_col string, string_col string, " +
1178 "timestamp_col timestamp) stored as avro tblproperties ('avro.schema.url'='%s')",
1179 alltypesSchemaLoc));
1180 AnalyzesOk(
"create table foo_avro (string1 string) stored as avro tblproperties " +
1181 "('avro.schema.literal'='{\"name\": \"my_record\", \"type\": \"record\", " +
1182 "\"fields\": [{\"name\": \"string1\", \"type\": \"string\"}]}')");
// --- Valid: no column list at all, columns taken entirely from the schema.
1186 "create table foo_avro with serdeproperties ('avro.schema.url'='%s')" +
1187 "stored as avro", alltypesSchemaLoc));
1189 "create table foo_avro stored as avro tblproperties ('avro.schema.url'='%s')",
1190 alltypesSchemaLoc));
1191 AnalyzesOk(
"create table foo_avro stored as avro tblproperties " +
1192 "('avro.schema.literal'='{\"name\": \"my_record\", \"type\": \"record\", " +
1193 "\"fields\": [{\"name\": \"string1\", \"type\": \"string\"}]}')");
// --- Column-count mismatch: Avro schema wins, with a warning.
1197 "create table foo_avro (id int) with serdeproperties ('avro.schema.url'='%s')" +
1198 "stored as avro", alltypesSchemaLoc),
1199 "Ignoring column definitions in favor of Avro schema.\n" +
1200 "The Avro schema has 11 column(s) but 1 column definition(s) were given.");
1202 "create table foo_avro (bool_col boolean, string_col string) " +
1203 "stored as avro tblproperties ('avro.schema.url'='%s')",
1205 "Ignoring column definitions in favor of Avro schema.\n" +
1206 "The Avro schema has 11 column(s) but 2 column definition(s) were given.");
1207 AnalyzesOk(
"create table foo_avro (string1 string) stored as avro tblproperties " +
1208 "('avro.schema.literal'='{\"name\": \"my_record\", \"type\": \"record\", " +
1209 "\"fields\": [{\"name\": \"string1\", \"type\": \"string\"}," +
1210 "{\"name\": \"string2\", \"type\": \"string\"}]}')",
1211 "Ignoring column definitions in favor of Avro schema.\n" +
1212 "The Avro schema has 2 column(s) but 1 column definition(s) were given.");
// --- Column name / type mismatches against the Avro schema.
1215 "create table foo_avro (id int, bool_col boolean, tinyint_col int, " +
1216 "smallint_col int, bad_int_col int, bigint_col bigint, float_col float," +
1217 "double_col double, date_string_col string, string_col string, " +
1218 "timestamp_col timestamp) with serdeproperties ('avro.schema.url'='%s')" +
1219 "stored as avro", alltypesSchemaLoc),
1220 "Ignoring column definitions in favor of Avro schema due to a mismatched " +
1221 "column name at position 5.\n" +
1222 "Column definition: bad_int_col INT\n" +
1223 "Avro schema column: int_col INT");
1226 "create table foo_avro (id int, bool_col boolean, tinyint_col int, " +
1227 "smallint_col int, int_col int, bigint_col bigint, float_col float," +
1228 "double_col bigint, date_string_col string, string_col string, " +
1229 "timestamp_col timestamp) stored as avro tblproperties ('avro.schema.url'='%s')",
1231 "Ignoring column definitions in favor of Avro schema due to a mismatched " +
1232 "column type at position 8.\n" +
1233 "Column definition: double_col BIGINT\n" +
1234 "Avro schema column: double_col DOUBLE");
// --- Missing schema: neither SERDEPROPERTIES nor TBLPROPERTIES provide one.
1237 AnalysisError(
"create table foo_avro (i int) stored as avro",
1238 "No Avro schema provided in SERDEPROPERTIES or TBLPROPERTIES for table: " +
1239 "default.foo_avro");
1240 AnalysisError(
"create table foo_avro (i int) stored as avro tblproperties ('a'='b')",
1241 "No Avro schema provided in SERDEPROPERTIES or TBLPROPERTIES for table: " +
1242 "default.foo_avro");
1243 AnalysisError(
"create table foo_avro stored as avro tblproperties ('a'='b')",
1244 "No Avro schema provided in SERDEPROPERTIES or TBLPROPERTIES for table: "+
1245 "default.foo_avro");
// --- Bad avro.schema.url values.
1248 AnalysisError(
"create table foo_avro (i int) stored as avro tblproperties " +
1249 "('avro.schema.url'='schema.avsc')",
1250 "Invalid avro.schema.url: schema.avsc. Path does not exist.");
1251 AnalysisError(
"create table foo_avro (i int) stored as avro tblproperties " +
1252 "('avro.schema.url'='hdfs://invalid*host/schema.avsc')",
1253 "Invalid avro.schema.url: hdfs://invalid*host/schema.avsc. " +
1254 "Incomplete HDFS URI, no host: hdfs://invalid*host/schema.avsc");
1255 AnalysisError(
"create table foo_avro (i int) stored as avro tblproperties " +
1256 "('avro.schema.url'='foo://bar/schema.avsc')",
1257 "Invalid avro.schema.url: foo://bar/schema.avsc. No FileSystem for scheme: foo");
// --- Avro decimal logical type: precision required, scale optional.
1260 AnalyzesOk(
"create table foo_avro (i int) stored as avro tblproperties " +
1261 "('avro.schema.literal'='{\"name\": \"my_record\", \"type\": \"record\", " +
1262 "\"fields\": [{\"name\":\"value\",\"type\":{\"type\":\"bytes\", " +
1263 "\"logicalType\":\"decimal\",\"precision\":5,\"scale\":2}}]}')");
1265 AnalyzesOk(
"create table foo_avro (i int) stored as avro tblproperties " +
1266 "('avro.schema.literal'='{\"name\": \"my_record\", \"type\": \"record\", " +
1267 "\"fields\": [{\"name\":\"value\",\"type\":{\"type\":\"bytes\", " +
1268 "\"logicalType\":\"decimal\",\"precision\":5}}]}')");
1270 AnalysisError(
"create table foo_avro (i int) stored as avro tblproperties " +
1271 "('avro.schema.literal'='{\"name\": \"my_record\", \"type\": \"record\", " +
1272 "\"fields\": [{\"name\":\"value\",\"type\":{\"type\":\"bytes\", " +
1273 "\"logicalType\":\"decimal\",\"scale\":5}}]}')",
1274 "Error parsing Avro schema for table 'default.foo_avro': " +
1275 "No 'precision' property specified for 'decimal' logicalType");
1277 AnalysisError(
"create table foo_avro (i int) stored as avro tblproperties " +
1278 "('avro.schema.literal'='{\"name\": \"my_record\", \"type\": \"record\", " +
1279 "\"fields\": [{\"name\":\"value\",\"type\":{\"type\":\"bytes\", " +
1280 "\"logicalType\":\"decimal\",\"scale\":5, \"precision\":-20}}]}')",
1281 "Error parsing Avro schema for table 'default.foo_avro': " +
1282 "Invalid decimal 'precision' property value: -20");
1283 AnalysisError(
"create table foo_avro (i int) stored as avro tblproperties " +
1284 "('avro.schema.literal'='{\"name\": \"my_record\", \"type\": \"record\", " +
1285 "\"fields\": [{\"name\":\"value\",\"type\":{\"type\":\"bytes\", " +
1286 "\"logicalType\":\"decimal\",\"scale\":-1, \"precision\":20}}]}')",
1287 "Error parsing Avro schema for table 'default.foo_avro': " +
1288 "Invalid decimal 'scale' property value: -1");
// --- Malformed JSON and Avro types Impala does not support as columns.
1291 AnalysisError(
"create table foo_avro (i int) stored as avro tblproperties " +
1292 "('avro.schema.literal'='{\"name\": \"my_record\", \"type\": \"record\", " +
1293 "\"fields\": {\"name\": \"string1\", \"type\": \"string\"}]}')",
1294 "Error parsing Avro schema for table 'default.foo_avro': " +
1295 "org.codehaus.jackson.JsonParseException: Unexpected close marker ']': "+
1300 AnalysisError(
"create table foo_avro (i int) stored as avro tblproperties " +
1301 "('avro.schema.literal'='{\"name\": \"my_record\", \"type\": \"record\", " +
1302 "\"fields\": [{\"name\": \"string1\", \"type\": \"string\"}," +
1303 "{\"name\": \"list1\", \"type\": {\"type\":\"array\", \"items\": \"int\"}}]}')",
1304 "Error parsing Avro schema for table 'default.foo_avro': " +
1305 "Unsupported type 'array' of column 'list1'");
1307 AnalysisError(
"create table foo_avro (i int) stored as avro tblproperties " +
1308 "('avro.schema.literal'='{\"name\": \"my_record\", \"type\": \"record\", " +
1309 "\"fields\": [{\"name\": \"string1\", \"type\": \"string\"}," +
1310 "{\"name\": \"map1\", \"type\": {\"type\":\"map\", \"values\": \"int\"}}]}')",
1311 "Error parsing Avro schema for table 'default.foo_avro': " +
1312 "Unsupported type 'map' of column 'map1'");
1315 AnalysisError(
"create table foo_avro (i int) stored as avro tblproperties " +
1316 "('avro.schema.literal'='{\"name\": \"my_record\", \"type\": \"record\", " +
1317 "\"fields\": [{\"name\": \"string1\", \"type\": \"string\"}," +
1318 "{\"name\": \"union1\", \"type\": [\"float\", \"boolean\"]}]}')",
1319 "Error parsing Avro schema for table 'default.foo_avro': " +
1320 "Unsupported type 'union' of column 'union1'");
// NOTE(review): corrupted-extraction fragment of complex-type column tests:
// struct/array/map columns (including nesting) are accepted as regular
// columns but rejected as partition columns. Original-file line numbers
// (1324-1350) are fused in. Bytes untouched; comments only.
//
// --- Structs, including nested structs and structs of collections.
1324 AnalyzesOk(
"create table functional.new_table (" +
1325 "a struct<f1: int, f2: string, f3: timestamp, f4: boolean>, " +
1326 "b struct<f1: struct<f11: int>, f2: struct<f21: struct<f22: string>>>, " +
1327 "c struct<f1: map<int, string>, f2: array<bigint>>," +
1328 "d struct<f1: struct<f11: map<int, string>, f12: array<bigint>>>)");
// --- Arrays, including nested arrays and arrays of structs/maps.
1330 AnalyzesOk(
"create table functional.new_table (" +
1331 "a array<int>, b array<timestamp>, c array<string>, d array<boolean>, " +
1332 "e array<array<int>>, f array<array<array<string>>>, " +
1333 "g array<struct<f1: int, f2: string>>, " +
1334 "h array<map<string,int>>)");
// --- Maps with various key/value types.
1336 AnalyzesOk(
"create table functional.new_table (" +
1337 "a map<string, int>, b map<timestamp, boolean>, c map<bigint, float>, " +
1338 "d array<array<int>>, e array<array<array<string>>>, " +
1339 "f array<struct<f1: int, f2: string>>," +
1340 "g array<map<string,int>>)");
// --- Complex types are not valid partition-column types.
1342 AnalysisError(
"create table functional.new_table (i int) " +
1343 "partitioned by (x array<int>)",
1344 "Type 'ARRAY<INT>' is not supported as partition-column type in column: x");
1345 AnalysisError(
"create table functional.new_table (i int) " +
1346 "partitioned by (x map<int,int>)",
1347 "Type 'MAP<INT,INT>' is not supported as partition-column type in column: x");
1348 AnalysisError(
"create table functional.new_table (i int) " +
1349 "partitioned by (x struct<f1:int>)",
1350 "Type 'STRUCT<f1:INT>' is not supported as partition-column type in column: x");
// NOTE(review): corrupted-extraction fragment of the CREATE VIEW tests.
// Original-file line numbers (1356-1445) are fused in; the opener of the
// first statement and several later statements (originals ~1375-1376,
// ~1409-1444) were dropped, leaving bare string/message fragments.
// Bytes untouched; comments only.
//
// --- Valid view definitions, with and without explicit column lists.
"create view foo_new as select int_col, string_col from functional.alltypes");
1357 AnalyzesOk(
"create view functional.foo as select * from functional.alltypes");
1358 AnalyzesOk(
"create view if not exists foo as select * from functional.alltypes");
1359 AnalyzesOk(
"create view foo (a, b) as select int_col, string_col " +
1360 "from functional.alltypes");
1361 AnalyzesOk(
"create view functional.foo (a, b) as select int_col x, double_col y " +
1362 "from functional.alltypes");
// --- Views over views, constants, HBase tables, values/union, subqueries.
1365 AnalyzesOk(
"create view foo as select * from functional.alltypes_view");
1366 AnalyzesOk(
"create view foo (aaa, bbb) as select * from functional.complex_view");
1369 AnalyzesOk(
"create view foo as select trim('abc'), 17 * 7");
1372 AnalyzesOk(
"create view foo as select * from functional_hbase.alltypesagg");
// NOTE(review): opener for this statement (originals ~1375) lost in extraction.
1376 "select count(distinct x.int_col) from functional.alltypessmall x " +
1377 "inner join functional.alltypessmall y on (x.id = y.id) group by x.bigint_col");
1380 AnalyzesOk(
"create view foo (a, b) as values(1, 'a'), (2, 'b')");
1381 AnalyzesOk(
"create view foo (a, b) as select 1, 'a' union all select 2, 'b'");
1384 AnalyzesOk(
"create view test_view_with_subquery as " +
1385 "select * from functional.alltypestiny t where exists " +
1386 "(select * from functional.alltypessmall s where s.id = t.id)");
// --- Column-list arity must match the query's select list.
1389 AnalysisError(
"create view foo (a) as select int_col, string_col " +
1390 "from functional.alltypes",
1391 "Column-definition list has fewer columns (1) than the " +
1392 "view-definition query statement returns (2).");
1393 AnalysisError(
"create view foo (a, b, c) as select int_col " +
1394 "from functional.alltypes",
1395 "Column-definition list has more columns (3) than the " +
1396 "view-definition query statement returns (1).");
// --- Duplicate column names, in the query or the explicit list.
1398 AnalysisError(
"create view foo as select * from functional.alltypessmall a " +
1399 "inner join functional.alltypessmall b on a.id = b.id",
1400 "Duplicate column name: id");
1402 AnalysisError(
"create view foo (a, b, a) as select int_col, int_col, int_col " +
1403 "from functional.alltypes",
1404 "Duplicate column name: a");
// --- Invalid identifiers and other failures. The bare message lines below
// lost their statements in extraction (originals ~1409-1444).
1407 AnalysisError(
"create view `???`.new_view as select 1, 2, 3",
1408 "Invalid database name: ???");
1410 "Invalid table/view name: ^%&");
1412 "Invalid column/field name: ???");
1414 "Invalid column/field name: %^&");
1418 "select * from functional.alltypessmall ",
1419 "Table already exists: functional.alltypes");
1422 "select * from functional.alltypessmall ",
1423 "Database does not exist: wrongdb");
1426 "select * from wrongdb.alltypessmall ",
1427 "Could not resolve table reference: 'wrongdb.alltypessmall'");
1430 "select * from wrongdb.alltypessmall ",
1431 "Could not resolve table reference: 'wrongdb.alltypessmall'");
1434 "select int_col from functional.alltypessmall union all " +
1435 "select string_col from functional.alltypes",
1436 "Incompatible return types 'INT' and 'STRING' of exprs " +
1437 "'int_col' and 'string_col'.");
1442 "select int_array_col, int_map_col, int_struct_col " +
1443 "from functional.allcomplextypes",
1444 "Expr 'int_array_col' in select list returns a complex type 'ARRAY<INT>'.\n" +
1445 "Only scalar types are allowed in the select list.");
// NOTE(review): corrupted-extraction fragment of the CREATE FUNCTION (UDF)
// tests: binary location/symbol resolution, unsupported argument/return
// types, varargs, prepare/close functions, and builtin-name collisions.
// Original-file line numbers (1450-1621) are fused in; the udfSuffix /
// udfSuffixIr constants are cut off mid-initializer and several statement
// openers (e.g. around originals ~1576-1601) were dropped. Bytes untouched.
//
// --- Shared constants: a known symbol, LOCATION suffixes for a native (.so)
// and an IR (.ll) test binary, and the fully-qualified HDFS path.
1450 final String symbol =
1451 "'_Z8IdentityPN10impala_udf15FunctionContextERKNS_10BooleanValE'";
1452 final String udfSuffix =
" LOCATION '/test-warehouse/libTestUdfs.so' " +
1454 final String udfSuffixIr =
" LOCATION '/test-warehouse/test-udfs.ll' " +
1456 final String hdfsPath =
"hdfs://localhost:20500/test-warehouse/libTestUdfs.so";
// --- Valid signatures and function names.
1458 AnalyzesOk(
"create function foo() RETURNS int" + udfSuffix);
1459 AnalyzesOk(
"create function foo(int, int, string) RETURNS int" + udfSuffix);
1462 AnalyzesOk(
"create function functional.B() RETURNS int" + udfSuffix);
1463 AnalyzesOk(
"create function functional.B1() RETURNS int" + udfSuffix);
1464 AnalyzesOk(
"create function functional.`B1C`() RETURNS int" + udfSuffix);
1467 AnalyzesOk(
"create function A_B() RETURNS int" + udfSuffix);
// --- Binary types: .so loads, .ll with mangled or plain symbol, .SO, .jar.
1470 AnalyzesOk(
"create function foo() RETURNS int LOCATION " +
1471 "'/test-warehouse/libTestUdfs.so' " +
1472 "SYMBOL='_Z8IdentityPN10impala_udf15FunctionContextERKNS_10BooleanValE'");
1473 AnalysisError(
"create function foo() RETURNS int LOCATION " +
1474 "'/test-warehouse/libTestUdfs.ll' " +
1475 "SYMBOL='_Z8IdentityPN10impala_udf15FunctionContextERKNS_10BooleanValE'",
1476 "Could not load binary: /test-warehouse/libTestUdfs.ll");
1477 AnalyzesOk(
"create function foo() RETURNS int LOCATION " +
1478 "'/test-warehouse/test-udfs.ll' " +
1479 "SYMBOL='_Z8IdentityPN10impala_udf15FunctionContextERKNS_10BooleanValE'");
1480 AnalyzesOk(
"create function foo(int) RETURNS int LOCATION " +
1481 "'/test-warehouse/test-udfs.ll' SYMBOL='Identity'");
1483 AnalyzesOk(
"create function foo() RETURNS int LOCATION " +
1484 "'/test-warehouse/libTestUdfs.SO' " +
1485 "SYMBOL='_Z8IdentityPN10impala_udf15FunctionContextERKNS_10BooleanValE'");
1486 AnalyzesOk(
"create function foo() RETURNS int LOCATION " +
1487 "'/test-warehouse/hive-exec.jar' SYMBOL='a'");
// --- Unsupported UDF argument/return types.
1490 AnalysisError(
"create function foo() RETURNS timestamp LOCATION '/test-warehouse/hive-exec.jar' SYMBOL='a'",
1491 "Hive UDFs that use TIMESTAMP are not yet supported.");
1492 AnalysisError(
"create function foo(timestamp) RETURNS int LOCATION '/a.jar'",
1493 "Hive UDFs that use TIMESTAMP are not yet supported.");
1494 AnalysisError(
"create function foo() RETURNS decimal LOCATION '/a.jar'",
1495 "Hive UDFs that use DECIMAL are not yet supported.");
1496 AnalysisError(
"create function foo(Decimal) RETURNS int LOCATION '/a.jar'",
1497 "Hive UDFs that use DECIMAL are not yet supported.");
1498 AnalysisError(
"create function foo(char(5)) RETURNS int LOCATION '/a.jar'",
1499 "UDFs that use CHAR are not yet supported.");
1500 AnalysisError(
"create function foo(varchar(5)) RETURNS int LOCATION '/a.jar'",
1501 "UDFs that use VARCHAR are not yet supported.");
1502 AnalysisError(
"create function foo() RETURNS CHAR(5) LOCATION '/a.jar'",
1503 "UDFs that use CHAR are not yet supported.");
1504 AnalysisError(
"create function foo() RETURNS VARCHAR(5) LOCATION '/a.jar'",
1505 "UDFs that use VARCHAR are not yet supported.");
1506 AnalysisError(
"create function foo() RETURNS CHAR(5)" + udfSuffix,
1507 "UDFs that use CHAR are not yet supported.");
1508 AnalysisError(
"create function foo() RETURNS VARCHAR(5)" + udfSuffix,
1509 "UDFs that use VARCHAR are not yet supported.");
1510 AnalysisError(
"create function foo(CHAR(5)) RETURNS int" + udfSuffix,
1511 "UDFs that use CHAR are not yet supported.");
1512 AnalysisError(
"create function foo(VARCHAR(5)) RETURNS int" + udfSuffix,
1513 "UDFs that use VARCHAR are not yet supported.");
// --- DECIMAL is allowed for native UDFs; precision/scale still validated.
1515 AnalyzesOk(
"create function foo() RETURNS decimal" + udfSuffix);
1516 AnalyzesOk(
"create function foo() RETURNS decimal(38,10)" + udfSuffix);
1517 AnalyzesOk(
"create function foo(Decimal, decimal(10, 2)) RETURNS int" + udfSuffix);
1518 AnalysisError(
"create function foo() RETURNS decimal(100)" + udfSuffix,
1519 "Decimal precision must be <= 38: 100");
1520 AnalysisError(
"create function foo(Decimal(2, 3)) RETURNS int" + udfSuffix,
1521 "Decimal scale (3) must be <= precision (2)");
// --- Varargs and prepare_fn / close_fn resolution.
1524 AnalyzesOk(
"create function foo(INT...) RETURNS int" + udfSuffix);
1527 AnalyzesOk(
"create function foo() returns int" + udfSuffix
1528 +
" prepare_fn='ValidateOpenPrepare'" +
" close_fn='ValidateOpenClose'");
1529 AnalyzesOk(
"create function foo() returns int" + udfSuffixIr
1530 +
" prepare_fn='ValidateOpenPrepare'" +
" close_fn='ValidateOpenClose'");
1531 AnalyzesOk(
"create function foo() returns int" + udfSuffixIr
1532 +
" prepare_fn='_Z19ValidateOpenPreparePN10impala_udf15FunctionContextENS0_18FunctionStateScopeE'"
1533 +
" close_fn='_Z17ValidateOpenClosePN10impala_udf15FunctionContextENS0_18FunctionStateScopeE'");
1534 AnalysisError(
"create function foo() returns int" + udfSuffix +
" prepare_fn=''",
1535 "Could not find symbol ''");
1536 AnalysisError(
"create function foo() returns int" + udfSuffix +
" close_fn=''",
1537 "Could not find symbol ''");
1538 AnalysisError(
"create function foo() returns int" + udfSuffix +
1539 " prepare_fn='FakePrepare'",
1540 "Could not find function FakePrepare(impala_udf::FunctionContext*, "+
1541 "impala_udf::FunctionContext::FunctionStateScope) in: ");
// --- Names clashing with builtins are rejected regardless of signature.
1544 AnalysisError(
"create function sin(double) RETURNS double" + udfSuffix,
1545 "Function cannot have the same name as a builtin: sin");
1546 AnalysisError(
"create function sin() RETURNS double" + udfSuffix,
1547 "Function cannot have the same name as a builtin: sin");
// --- Bad LOCATION values: relative path, unknown scheme, unloadable file.
1550 AnalysisError(
"create function foo() RETURNS int LOCATION 'bad-location' SYMBOL='c'",
1551 "URI path must be absolute: bad-location");
1552 AnalysisError(
"create function foo() RETURNS int LOCATION " +
1553 "'blah://localhost:50200/bad-location' SYMBOL='c'",
1554 "No FileSystem for scheme: blah");
1555 AnalysisError(
"create function foo() RETURNS int LOCATION " +
1556 "'file:///foo.jar' SYMBOL='c'",
1557 "Could not load binary: file:///foo.jar");
// --- Binary extension must be .jar, .so or .ll (case-sensitive).
1560 AnalysisError(
"create function foo() RETURNS int LOCATION '/binary' SYMBOL='a'",
1561 "Unknown binary type: '/binary'. Binary must end in .jar, .so or .ll");
1562 AnalysisError(
"create function foo() RETURNS int LOCATION '/binary.a' SYMBOL='a'",
1563 "Unknown binary type: '/binary.a'. Binary must end in .jar, .so or .ll");
1564 AnalysisError(
"create function foo() RETURNS int LOCATION '/binary.so.' SYMBOL='a'",
1565 "Unknown binary type: '/binary.so.'. Binary must end in .jar, .so or .ll");
1568 AnalysisError(
"create function foo() RETURNS int LOCATION '/binary.so'",
1569 "Argument 'SYMBOL' must be set.");
1572 AnalysisError(
"create function foo() RETURNS int LOCATION '/blah.so' " +
1573 "SYMBOL='ab'",
"Could not load binary: /blah.so");
1574 AnalysisError(
"create function foo() RETURNS int LOCATION '/binary.JAR' SYMBOL='a'",
1575 "Could not load binary: /binary.JAR");
// --- Symbol resolution in the test .so. NOTE(review): the AnalysisError
// openers for the next several statements (originals ~1576-1601) were lost
// in extraction; only LOCATION/SYMBOL fragments and messages remain.
1577 "LOCATION '/test-warehouse/libTestUdfs.so' " +
1578 "SYMBOL='b'",
"Could not find function b() in: " + hdfsPath);
1580 "LOCATION '/test-warehouse/libTestUdfs.so' " +
1581 "SYMBOL=''",
"Could not find symbol ''");
1583 "LOCATION '/test-warehouse/libTestUdfs.so' " +
1585 "Could not find symbol '_ZAB' in: " + hdfsPath);
// --- Unmangled symbol lookup: return type is not part of the lookup key,
// symbol names are case-sensitive, and arity must match.
1588 AnalyzesOk(
"create function foo() RETURNS int " +
1589 "LOCATION '/test-warehouse/libTestUdfs.so' " +
"SYMBOL='NoArgs'");
1591 AnalyzesOk(
"create function foo() RETURNS double " +
1592 "LOCATION '/test-warehouse/libTestUdfs.so' " +
"SYMBOL='NoArgs'");
1595 "LOCATION '/test-warehouse/libTestUdfs.so' " +
"SYMBOL='noArgs'",
1596 "Could not find function noArgs() in: " + hdfsPath);
1599 "LOCATION '/test-warehouse/libTestUdfs.so' " +
"SYMBOL='NoArgs'",
1600 "Could not find function NoArgs(INT) in: " + hdfsPath);
// --- The 'Identity' symbol resolves for every scalar argument type.
1603 AnalyzesOk(
"create function identity(boolean) RETURNS int " +
1604 "LOCATION '/test-warehouse/libTestUdfs.so' " +
"SYMBOL='Identity'");
1605 AnalyzesOk(
"create function identity(tinyint) RETURNS int " +
1606 "LOCATION '/test-warehouse/libTestUdfs.so' " +
"SYMBOL='Identity'");
1607 AnalyzesOk(
"create function identity(smallint) RETURNS int " +
1608 "LOCATION '/test-warehouse/libTestUdfs.so' " +
"SYMBOL='Identity'");
1609 AnalyzesOk(
"create function identity(int) RETURNS int " +
1610 "LOCATION '/test-warehouse/libTestUdfs.so' " +
"SYMBOL='Identity'");
1611 AnalyzesOk(
"create function identity(bigint) RETURNS int " +
1612 "LOCATION '/test-warehouse/libTestUdfs.so' " +
"SYMBOL='Identity'");
1613 AnalyzesOk(
"create function identity(float) RETURNS int " +
1614 "LOCATION '/test-warehouse/libTestUdfs.so' " +
"SYMBOL='Identity'");
1615 AnalyzesOk(
"create function identity(double) RETURNS int " +
1616 "LOCATION '/test-warehouse/libTestUdfs.so' " +
"SYMBOL='Identity'");
1617 AnalyzesOk(
"create function identity(string) RETURNS int " +
1618 "LOCATION '/test-warehouse/libTestUdfs.so' " +
"SYMBOL='Identity'");
1619 AnalyzesOk(
"create function all_types_fn(string, boolean, tinyint, " +
1620 "smallint, int, bigint, float, double, decimal) returns int " +
1621 "location '/test-warehouse/libTestUdfs.so' symbol='AllTypes'");
1624 AnalysisError(
"create function 123A() RETURNS int" + udfSuffix,
1625 "Function cannot start with a digit: 123a");
1626 AnalysisError(
"create function A.`1A`() RETURNS int" + udfSuffix,
1627 "Function cannot start with a digit: 1a");
1628 AnalysisError(
"create function A.`ABC-D`() RETURNS int" + udfSuffix,
1629 "Function names must be all alphanumeric or underscore. Invalid name: abc-d");
1630 AnalysisError(
"create function baddb.f() RETURNS int" + udfSuffix,
1631 "Database does not exist: baddb");
1632 AnalysisError(
"create function a.b.c() RETURNS int" + udfSuffix,
1633 "Invalid function name: 'a.b.c'. Expected [dbname].funcname.");
1634 AnalysisError(
"create function a.b.c.d(smallint) RETURNS int" + udfSuffix,
1635 "Invalid function name: 'a.b.c.d'. Expected [dbname].funcname.");
1638 AnalysisError(
"create function f() RETURNS array<int>" + udfSuffix,
1639 "Type 'ARRAY<INT>' is not supported in UDFs/UDAs.");
1640 AnalysisError(
"create function f(map<string,int>) RETURNS int" + udfSuffix,
1641 "Type 'MAP<STRING,INT>' is not supported in UDFs/UDAs.");
1642 AnalysisError(
"create function f() RETURNS struct<f:int>" + udfSuffix,
1643 "Type 'STRUCT<f:INT>' is not supported in UDFs/UDAs.");
1646 AnalysisError(
"create function f() RETURNS array<int>" + udfSuffix,
1647 "Type 'ARRAY<INT>' is not supported in UDFs/UDAs.");
1648 AnalysisError(
"create function f(map<string,int>) RETURNS int" + udfSuffix,
1649 "Type 'MAP<STRING,INT>' is not supported in UDFs/UDAs.");
1650 AnalysisError(
"create function f() RETURNS struct<f:int>" + udfSuffix,
1651 "Type 'STRUCT<f:INT>' is not supported in UDFs/UDAs.");
1655 AnalysisError(
"drop function foo()",
"Function does not exist: foo()");
1656 AnalyzesOk(
"drop function if exists a.foo()");
1657 AnalysisError(
"drop function a.foo()",
"Database does not exist: a");
1659 AnalyzesOk(
"drop function if exists foo(int...)");
1660 AnalyzesOk(
"drop function if exists foo(double, int...)");
1667 AnalysisError(
"create function TestFn() RETURNS INT " + udfSuffix,
1668 "Function already exists: testfn()");
1669 AnalysisError(
"create function TestFn(double) RETURNS INT " + udfSuffix,
1670 "Function already exists: testfn(DOUBLE)");
1673 AnalysisError(
"create function TestFn(double...) RETURNS INT" + udfSuffix,
1674 "Function already exists: testfn(DOUBLE)");
1675 AnalysisError(
"create function TestFn(double) RETURNS INT" + udfSuffix,
1676 "Function already exists: testfn(DOUBLE)");
1680 AnalyzesOk(
"drop function TestFn(int, int)");
1682 "Function does not exist: testfn(INT, INT, INT)");
1685 AnalysisError(
"create function TestFn(String) RETURNS INT" + udfSuffix,
1686 "Function already exists: testfn(STRING...)");
1687 AnalysisError(
"create function TestFn(String...) RETURNS INT" + udfSuffix,
1688 "Function already exists: testfn(STRING...)");
1689 AnalysisError(
"create function TestFn(String, String) RETURNS INT" + udfSuffix,
1690 "Function already exists: testfn(STRING...)");
1691 AnalyzesOk(
"create function TestFn(String, String, Int) RETURNS INT" + udfSuffix);
1694 AnalyzesOk(
"create function TestFn(int) RETURNS INT " + udfSuffix);
1697 AnalyzesOk(
"create function functional.TestFn() RETURNS INT " + udfSuffix);
1701 AnalyzesOk(
"drop function TestFn(string...)");
1703 "Function does not exist: testfn(DOUBLE...)");
1704 AnalysisError(
"drop function TestFn(int)",
"Function does not exist: testfn(INT)");
1706 "drop function functional.TestFn()",
"Function does not exist: testfn()");
1708 AnalysisError(
"create function f() returns int " + udfSuffix +
1709 "init_fn='a'",
"Optional argument 'INIT_FN' should not be set");
1710 AnalysisError(
"create function f() returns int " + udfSuffix +
1711 "serialize_fn='a'",
"Optional argument 'SERIALIZE_FN' should not be set");
1712 AnalysisError(
"create function f() returns int " + udfSuffix +
1713 "merge_fn='a'",
"Optional argument 'MERGE_FN' should not be set");
1714 AnalysisError(
"create function f() returns int " + udfSuffix +
1715 "finalize_fn='a'",
"Optional argument 'FINALIZE_FN' should not be set");
1720 final String loc =
" LOCATION '/test-warehouse/libTestUdas.so' ";
1721 final String hdfsLoc =
"hdfs://localhost:20500/test-warehouse/libTestUdas.so";
1722 AnalyzesOk(
"create aggregate function foo(int) RETURNS int" + loc +
1723 "UPDATE_FN='AggUpdate'");
1724 AnalyzesOk(
"create aggregate function foo(int) RETURNS int" + loc +
1725 "UPDATE_FN='AggUpdate' INIT_FN='AggInit'");
1726 AnalyzesOk(
"create aggregate function foo(int) RETURNS int" + loc +
1727 "UPDATE_FN='AggUpdate' INIT_FN='AggInit' MERGE_FN='AggMerge'");
1728 AnalysisError(
"create aggregate function foo(int) RETURNS int" + loc +
1729 "UPDATE_FN='AggUpdate' INIT_FN='AGgInit'",
1730 "Could not find function AGgInit() returns INT in: " + hdfsLoc);
1731 AnalyzesOk(
"create aggregate function foo(int, int) RETURNS int" + loc +
1732 "UPDATE_FN='AggUpdate'");
1733 AnalyzesOk(
"create aggregate function foo(string, double) RETURNS string" + loc +
1734 "UPDATE_FN='AggUpdate'");
1737 AnalysisError(
"create aggregate function foo(int...) RETURNS int" + loc,
1738 "UDAs with varargs are not yet supported.");
1740 +
"foo(int, int, int, int, int, int, int , int, int) "
1741 +
"RETURNS int" + loc,
1742 "UDAs with more than 8 arguments are not yet supported.");
1746 " UPDATE_FN='_Z9AggUpdatePN10impala_udf15FunctionContextERKNS_6IntValEPS2_' " +
1747 "INIT_FN='_Z7AggInitPN10impala_udf15FunctionContextEPNS_6IntValE' " +
1748 "MERGE_FN='_Z8AggMergePN10impala_udf15FunctionContextERKNS_6IntValEPS2_'";
1749 AnalysisError(
"create aggregate function foo(CHAR(5)) RETURNS int" + loc + symbols,
1750 "UDAs with CHAR arguments are not yet supported.");
1751 AnalysisError(
"create aggregate function foo(VARCHAR(5)) RETURNS int" + loc + symbols,
1752 "UDAs with VARCHAR arguments are not yet supported.");
1753 AnalysisError(
"create aggregate function foo(int) RETURNS CHAR(5)" + loc + symbols,
1754 "UDAs with CHAR return type are not yet supported.");
1755 AnalysisError(
"create aggregate function foo(int) RETURNS VARCHAR(5)" + loc + symbols,
1756 "UDAs with VARCHAR return type are not yet supported.");
1762 AnalysisError(
"create aggregate function foo(int) RETURNS int" + loc +
1763 "UPDATE_FN='_Z9AggUpdatePN10impala_udf15FunctionContextERKNS_6IntValEPS2_'",
1764 "Could not infer symbol for init() function.");
1765 AnalysisError(
"create aggregate function foo(int) RETURNS int" + loc +
1766 "UPDATE_FN='_Z9AggUpdatePN10impala_udf15FunctionContextERKNS_6IntValEPS2_' " +
1767 "INIT_FN='_Z7AggInitPN10impala_udf15FunctionContextEPNS_6IntValE'",
1768 "Could not infer symbol for merge() function.");
1769 AnalyzesOk(
"create aggregate function foo(int) RETURNS int" + loc +
1770 "UPDATE_FN='_Z9AggUpdatePN10impala_udf15FunctionContextERKNS_6IntValEPS2_' " +
1771 "INIT_FN='_Z7AggInitPN10impala_udf15FunctionContextEPNS_6IntValE' " +
1772 "MERGE_FN='_Z8AggMergePN10impala_udf15FunctionContextERKNS_6IntValEPS2_'");
1777 AnalyzesOk(
"create aggregate function foo(int) RETURNS int " +
1778 "INTERMEDIATE int" + loc +
"UPDATE_FN='AggUpdate'");
1779 AnalysisError(
"create aggregate function foo(int) RETURNS int " +
1780 "INTERMEDIATE double" + loc +
"UPDATE_FN='AggUpdate'",
1781 "UDAs with an intermediate type, DOUBLE, that is different from the " +
1782 "return type, INT, are currently not supported.");
1783 AnalysisError(
"create aggregate function foo(int) RETURNS int " +
1784 "INTERMEDIATE char(10)" + loc +
"UPDATE_FN='AggUpdate'",
1785 "UDAs with an intermediate type, CHAR(10), that is different from the " +
1786 "return type, INT, are currently not supported.");
1787 AnalysisError(
"create aggregate function foo(int) RETURNS int " +
1788 "INTERMEDIATE decimal(10)" + loc +
"UPDATE_FN='AggUpdate'",
1789 "UDAs with an intermediate type, DECIMAL(10,0), that is different from the " +
1790 "return type, INT, are currently not supported.");
1791 AnalysisError(
"create aggregate function foo(int) RETURNS int " +
1792 "INTERMEDIATE decimal(40)" + loc +
"UPDATE_FN='AggUpdate'",
1793 "Decimal precision must be <= 38: 40");
1805 AnalysisError(
"create aggregate function foo(int) RETURNS int" + loc +
"SYMBOL='Bad'",
1806 "Optional argument 'SYMBOL' should not be set.");
1809 AnalysisError(
"create aggregate function foo(int) RETURNS int " +
1810 "INTERMEDIATE CHAR(0) LOCATION '/foo.so' UPDATE_FN='b'",
1811 "Char size must be > 0: 0");
1812 AnalysisError(
"create aggregate function foo() RETURNS int" + loc,
1813 "UDAs must take at least one argument.");
1814 AnalysisError(
"create aggregate function foo(int) RETURNS int LOCATION " +
1815 "'/foo.jar' UPDATE_FN='b'",
"Java UDAs are not supported.");
1818 AnalysisError(
"create aggregate function foo(string, double) RETURNS array<int> " +
1819 loc +
"UPDATE_FN='AggUpdate'",
1820 "Type 'ARRAY<INT>' is not supported in UDFs/UDAs.");
1821 AnalysisError(
"create aggregate function foo(map<string,int>) RETURNS int " +
1822 loc +
"UPDATE_FN='AggUpdate'",
1823 "Type 'MAP<STRING,INT>' is not supported in UDFs/UDAs.");
1824 AnalysisError(
"create aggregate function foo(int) RETURNS struct<f:int> " +
1825 loc +
"UPDATE_FN='AggUpdate'",
1826 "Type 'STRUCT<f:INT>' is not supported in UDFs/UDAs.");
1829 AnalysisError(
"create aggregate function foo(int) RETURNS int LOCATION " +
1830 "'/foo.ll' UPDATE_FN='Fn'",
"IR UDAs are not yet supported.");
1838 AnalysisError(
"create aggregate function foo(string, double) RETURNS string" + loc +
1839 "UPDATE_FN='Agg'",
"Could not infer symbol for init() function.");
1840 AnalysisError(
"create aggregate function foo(string, double) RETURNS string" + loc +
1841 "UPDATE_FN='Agg' INIT_FN='AggInit'",
1842 "Could not infer symbol for merge() function.");
1843 AnalyzesOk(
"create aggregate function foo(string, double) RETURNS string" + loc +
1844 "UPDATE_FN='Agg' INIT_FN='AggInit' MERGE_FN='AggMerge'");
1845 AnalyzesOk(
"create aggregate function foo(string, double) RETURNS string" + loc +
1846 "UPDATE_FN='Agg' INIT_FN='AggInit' MERGE_FN='AggMerge' " +
1847 "SERIALIZE_FN='AggSerialize'");
1848 AnalyzesOk(
"create aggregate function foo(string, double) RETURNS string" + loc +
1849 "UPDATE_FN='Agg' INIT_FN='AggInit' MERGE_FN='AggMerge' " +
1850 "SERIALIZE_FN='AggSerialize' FINALIZE_FN='AggFinalize'");
1853 AnalyzesOk(
"create aggregate function foo(string, double) RETURNS string" + loc +
1854 "UPDATE_FN='AggUpdate' SERIALIZE_FN='AggSerialize' FINALIZE_FN='AggSerialize'");
1855 AnalyzesOk(
"create aggregate function foo(string, double) RETURNS string" + loc +
1856 "UPDATE_FN='AggUpdate' SERIALIZE_FN='AggFinalize' FINALIZE_FN='AggFinalize'");
1860 AnalysisError(
"create aggregate function foo(string, double) RETURNS string" + loc +
1861 "UPDATE_FN='AggUpdate' INIT_FN='AggSerialize'",
1862 "Could not find function AggSerialize() returns STRING in: " + hdfsLoc);
1866 AnalyzesOk(
"create aggregate function foo(string, double) RETURNS string" + loc +
1867 "UPDATE_FN='AggUpdate' "+
1868 "INIT_FN='_Z12AggSerializePN10impala_udf15FunctionContextERKNS_6IntValE'");
1869 AnalysisError(
"create aggregate function foo(string, double) RETURNS string" + loc +
1870 "UPDATE_FN='AggUpdate' INIT_FN='_ZAggSerialize'",
1871 "Could not find symbol '_ZAggSerialize' in: " + hdfsLoc);
1874 AnalysisError(
"create aggregate function foo(string, double) RETURNS string" + loc +
1875 "UPDATE_FN='Agg2Update'",
1876 "Could not find function Agg2Init() returns STRING in: " + hdfsLoc);
1877 AnalysisError(
"create aggregate function foo(string, double) RETURNS string" + loc +
1878 "UPDATE_FN='Agg2Update' INIT_FN='AggInit'",
1879 "Could not find function Agg2Merge(STRING) returns STRING in: " + hdfsLoc);
1880 AnalyzesOk(
"create aggregate function foo(string, double) RETURNS string" + loc +
1881 "UPDATE_FN='Agg2Update' INIT_FN='AggInit' MERGE_FN='AggMerge'");
1882 AnalysisError(
"create aggregate function foo(string, double) RETURNS string" + loc +
1883 "UPDATE_FN='Agg2Update' INIT_FN='AggInit' MERGE_FN='BadFn'",
1884 "Could not find function BadFn(STRING) returns STRING in: " + hdfsLoc);
1885 AnalysisError(
"create aggregate function foo(string, double) RETURNS string" + loc +
1886 "UPDATE_FN='Agg2Update' INIT_FN='AggInit' MERGE_FN='AggMerge' "+
1887 "FINALIZE_FN='not there'",
1888 "Could not find function not there(STRING) in: " + hdfsLoc);
1896 for (String typeDefStr: typeDefs) {
1898 AnalyzesOk(String.format(
"CREATE TABLE t (i ARRAY<%s>)", typeDefStr));
1899 AnalyzesOk(String.format(
"CREATE TABLE t (i STRUCT<f:%s>)", typeDefStr));
1903 Type t = createTableStmt.getColumnDefs().
get(0).getType();
1907 "CREATE TABLE t (i MAP<int, %s>)", typeDefStr, typeDefStr));
1910 "CREATE TABLE t (i MAP<%s, %s>)", typeDefStr, typeDefStr));
1920 AnalysisError(String.format(
"CREATE TABLE t (i %s)", typeDef), expectedError);
1948 "Decimal scale (10) must be <= precision (1)");
1950 "Decimal precision must be > 0: 0");
1952 "Decimal precision must be <= 38");
1959 TypeDefsAnalyzeOk(
"STRUCT<a:TINYINT COMMENT 'x', b:BIGINT, c:DOUBLE COMMENT 'y'>");
1963 "Map type cannot have a complex-typed key: MAP<ARRAY<INT>,INT>");
1966 "Duplicate field name 'f1' in struct 'STRUCT<f1:INT,f2:STRING,f1:FLOAT>'");
1969 "Invalid struct field name: ???");
1975 AnalysisError(
"use db_does_not_exist",
"Database does not exist: db_does_not_exist");
1985 AnalyzesOk(
"describe formatted functional.alltypes");
1988 "Database does not exist: nodb");
1990 "Table does not exist: functional.notbl");
1996 AnalyzesOk(
"show databases like '*pattern'");
1999 AnalyzesOk(
"show data sources like '*pattern'");
2004 for (String fnType:
new String[]{
"",
"aggregate",
"analytic"}) {
2005 AnalyzesOk(String.format(
"show %s functions", fnType));
2006 AnalyzesOk(String.format(
"show %s functions like '*pattern'", fnType));
2007 AnalyzesOk(String.format(
"show %s functions in functional", fnType));
2009 "show %s functions in functional like '*pattern'", fnType));
2012 AnalysisError(
"show functions in baddb",
"Database does not exist: baddb");
2014 "Database does not exist: baddb");
2020 AnalyzesOk(String.format(
"show files in functional.emptytable"));
2022 String[] partitions =
new String[] {
"",
"partition(month=10, year=2010)" };
2023 for (String partition: partitions) {
2024 AnalyzesOk(String.format(
"show files in functional.alltypes %s", partition));
2026 AnalysisError(String.format(
"show files in baddb.alltypes %s", partition),
2027 "Database does not exist: baddb");
2028 AnalysisError(String.format(
"show files in functional.badtbl %s", partition),
2029 "Table does not exist: functional.badtbl");
2031 AnalysisError(String.format(
"show files in functional.alltypes_view %s",
2033 "SHOW FILES not applicable to a non hdfs table: functional.alltypes_view");
2037 AnalysisError(
"show files in functional.alltypes partition(year=2010,int_col=1)",
2038 "Column 'int_col' is not a partition column in table: functional.alltypes");
2040 AnalysisError(
"show files in functional.alltypes partition(year=2010,day=1)",
2041 "Partition column 'day' not found in table: functional.alltypes");
2043 AnalysisError(
"show files in functional.tinyinttable partition(int_col=1)",
2044 "Table is not partitioned: functional.tinyinttable");
2046 AnalysisError(
"show files in functional.alltypes partition(year=2010,month=NULL)",
2047 "Partition spec does not exist: (year=2010, month=NULL)");
2052 String[] statsQuals =
new String[] {
"table",
"column"};
2053 for (String qual : statsQuals) {
2054 AnalyzesOk(String.format(
"show %s stats functional.alltypes", qual));
2056 AnalysisError(String.format(
"show %s stats baddb.alltypes", qual),
2057 "Database does not exist: baddb");
2058 AnalysisError(String.format(
"show %s stats functional.badtbl", qual),
2059 "Table does not exist: functional.badtbl");
2061 AnalysisError(String.format(
"show %s stats functional.alltypes_view", qual),
2062 String.format(
"SHOW %s STATS not applicable to a view: " +
2063 "functional.alltypes_view", qual.toUpperCase()));
2069 AnalyzesOk(
"show partitions functional.alltypes");
2071 "Database does not exist: baddb");
2073 "Table does not exist: functional.badtbl");
2075 "Table is not partitioned: functional.alltypesnopart");
2077 "SHOW PARTITIONS not applicable to a view: functional.view_view");
2079 "SHOW PARTITIONS must target an HDFS table: functional_hbase.alltypes");
2088 String location =
"/test-warehouse/.tmp_" + UUID.randomUUID().toString();
2089 Path parentPath = FileSystemUtil.createFullyQualifiedPath(
new Path(location));
2090 FileSystem fs = null;
2092 fs = parentPath.getFileSystem(FileSystemUtil.getConfiguration());
2095 AnalyzesOk(String.format(
"create table new_table (col INT) location '%s/new_table'",
2097 String.format(
"Path '%s' cannot be reached: Path does not exist.",
2101 AnalyzesOk(String.format(
"create table new_table (col INT) location " +
2102 "'%s/new_table/'", location),
2103 String.format(
"Path '%s' cannot be reached: Path does not exist.",
2106 AnalyzesOk(String.format(
"create table new_table location '%s/new_table' " +
2107 "as select 1, 1", location),
2108 String.format(
"Path '%s' cannot be reached: Path does not exist.",
2111 AnalyzesOk(String.format(
"create table new_table like functional.alltypes " +
2112 "location '%s/new_table'", location),
2113 String.format(
"Path '%s' cannot be reached: Path does not exist.",
2116 AnalyzesOk(String.format(
"create database new_db location '%s/new_db'",
2118 String.format(
"Path '%s' cannot be reached: Path does not exist.",
2121 fs.mkdirs(parentPath);
2123 FSDataOutputStream out =
2124 fs.create(
new Path(parentPath,
"test_loaddata/testdata.txt"));
2127 fs.setPermission(parentPath,
2128 new FsPermission(FsAction.NONE, FsAction.NONE, FsAction.NONE));
2131 AnalyzesOk(String.format(
"create data Source serverlog location " +
2132 "'%s/foo.jar' class 'foo.Bar' API_VERSION 'V1'", location),
2133 String.format(
"Impala does not have READ access to path '%s'", parentPath));
2135 AnalyzesOk(String.format(
"create external table new_table (col INT) location " +
2136 "'%s/new_table'", location),
2137 String.format(
"Impala does not have READ_WRITE access to path '%s'",
2140 AnalyzesOk(String.format(
"alter table functional.insert_string_partitioned " +
2141 "add partition (s2='hello') location '%s/new_partition'", location),
2142 String.format(
"Impala does not have READ_WRITE access to path '%s'",
2145 AnalyzesOk(String.format(
"alter table functional.stringpartitionkey " +
2146 "partition(string_col = 'partition1') set location '%s/new_part_loc'", location),
2147 String.format(
"Impala does not have READ_WRITE access to path '%s'",
2151 fs.setPermission(parentPath,
2152 new FsPermission(FsAction.READ_WRITE, FsAction.NONE, FsAction.NONE));
2154 AnalyzesOk(String.format(
"create external table new_table (col INT) location " +
2155 "'%s/new_table'", location));
2156 }
catch (IOException e) {
2157 throw new AnalysisException(e.getMessage(), e);
2161 if (fs != null && fs.exists(parentPath)) {
2162 fs.delete(parentPath,
true);
2164 }
catch (IOException e) {
void TestCreateDataSource()
void TypeDefsAnalyzeOk(String...typeDefs)
void TestAlterTableAddDropPartition()
void checkComputeStatsStmt(String stmt)
static boolean isSupportedPrimitiveType(PrimitiveType primitiveType)
void TestCreateTableLike()
static final ScalarType STRING
void TestCreateTableLikeFile()
void TestPermissionValidation()
void TestAlterTableSetCached()
void TestCreateAvroTest()
void TestDropIncrementalStats()
ParseNode AnalyzesOk(String stmt)
void TestComputeIncrementalStats()
void TestAlterTableRename()
void TestAlterTableAddReplaceColumns()
void TestAlterTableDropColumn()
static final ScalarType DOUBLE
String getColStatsQuery()
void TestShowPartitions()
static final ScalarType INT
uint64_t Test(T *ht, const ProbeTuple *input, uint64_t num_tuples)
Function addTestFunction(String name, ArrayList< ScalarType > args, boolean varArgs)
String getTblStatsQuery()
void TypeDefAnalysisError(String typeDef, String expectedError)
void TestCreateTableAsSelect()
void TestAlterViewRename()
void AnalysisError(String stmt)
void TestAlterTableChangeColumn()
static ArrayList< ScalarType > getSupportedTypes()