While running a CREATE TABLE statement in the Hive CLI, I hit this prompt: Display all 649 possibilities? (y or n)
After some digging, the cause turned out to be tab characters in the DDL. Below, SHOKO walks you through how to handle it:
Step 1: Locate the error. Work from the log; my output looked like this:
hive>
>
> CREATE TABLE IF NOT EXISTS ods_table_count_check(
> data_date string default CURRENT_DATE() COMMENT 'dt',
> database_name string
> table_name string
> Display all 649 possibilities? (y or n)
! !=
$ELEM$ $KEY$
$VALUE$ $elem$
[... the remaining completion candidates are omitted for brevity: 649 entries in all, covering Hive operators, keywords, and built-in function names ...]
> _column string default ' ',
> table_type string ',
> add_count bigint,
> Display all 649 possibilities? (y or n)
> e_column_sum double default -1.0 COMMENT 'sum over a single numeric column; defaults to -1.0 when no column is specified',
> total_count bigint COMMENT 'total row count of the table'
> );
NoViableAltException(12@[2389:1: columnNameTypeOrConstraint : ( ( tableConstraint ) | ( columnNameTypeConstraint ) );])
at org.antlr.runtime.DFA.noViableAlt(DFA.java:158)
at org.antlr.runtime.DFA.predict(DFA.java:144)
at org.apache.hadoop.hive.ql.parse.HiveParser.columnNameTypeOrConstraint(HiveParser.java:34044)
at org.apache.hadoop.hive.ql.parse.HiveParser.columnNameTypeOrConstraintList(HiveParser.java:29840)
at org.apache.hadoop.hive.ql.parse.HiveParser.createTableStatement(HiveParser.java:6662)
at org.apache.hadoop.hive.ql.parse.HiveParser.ddlStatement(HiveParser.java:4295)
at org.apache.hadoop.hive.ql.parse.HiveParser.execStatement(HiveParser.java:2494)
at org.apache.hadoop.hive.ql.parse.HiveParser.statement(HiveParser.java:1420)
at org.apache.hadoop.hive.ql.parse.ParseDriver.parse(ParseDriver.java:220)
at org.apache.hadoop.hive.ql.parse.ParseUtils.parse(ParseUtils.java:74)
at org.apache.hadoop.hive.ql.parse.ParseUtils.parse(ParseUtils.java:67)
at org.apache.hadoop.hive.ql.Driver.compile(Driver.java:616)
at org.apache.hadoop.hive.ql.Driver.compileInternal(Driver.java:1826)
at org.apache.hadoop.hive.ql.Driver.compileAndRespond(Driver.java:1773)
at org.apache.hadoop.hive.ql.Driver.compileAndRespond(Driver.java:1768)
at org.apache.hadoop.hive.ql.reexec.ReExecDriver.compileAndRespond(ReExecDriver.java:126)
at org.apache.hadoop.hive.ql.reexec.ReExecDriver.run(ReExecDriver.java:214)
at org.apache.hadoop.hive.cli.CliDriver.processLocalCmd(CliDriver.java:239)
at org.apache.hadoop.hive.cli.CliDriver.processCmd(CliDriver.java:188)
at org.apache.hadoop.hive.cli.CliDriver.processLine(CliDriver.java:402)
at org.apache.hadoop.hive.cli.CliDriver.executeDriver(CliDriver.java:821)
at org.apache.hadoop.hive.cli.CliDriver.run(CliDriver.java:759)
at org.apache.hadoop.hive.cli.CliDriver.main(CliDriver.java:683)
at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
at java.lang.reflect.Method.invoke(Method.java:498)
at org.apache.hadoop.util.RunJar.run(RunJar.java:323)
at org.apache.hadoop.util.RunJar.main(RunJar.java:236)
FAILED: ParseException line 5:0 cannot recognize input near '_column' 'string' 'default' in column name or constraint
hive>
Looking at the log above, the same prompt, Display all 649 possibilities? (y or n), shows up in two places. That is the key clue: in the Hive CLI, Tab is bound to readline autocompletion, so each tab in the pasted statement is treated as a completion request (here offering all 649 keywords, operators, and functions). The tab itself is consumed rather than inserted, and completion can alter the word before it, so the statement that finally reaches the parser is mangled, which is why parsing fails with the ParseException near '_column'.
Step 2: Inspect the statement. Using the positions flagged above, find the corresponding column definitions in the DDL. After studying those two lines and checking resources online, I confirmed the culprit really was tab characters; a quick way to spot them is sketched right below.
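If the statement lives in a file, you can locate the tabs before ever pasting. A sketch, assuming GNU grep/coreutils and a hypothetical file name create_table.sql:

grep -nP '\t' create_table.sql    # print every line (with its line number) that contains a tab
cat -A create_table.sql           # alternatively, show invisibles: tabs appear as ^I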
Delete the tabs, re-indent with spaces, and the statement runs successfully; a repaired version is sketched below.
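For reference, here is a minimal repaired version of the DDL, indented with spaces only. The two column names the tabs swallowed ('_column' and 'e_column_sum' in the log) cannot be recovered, so check_column and single_column_sum below are hypothetical placeholders:

CREATE TABLE IF NOT EXISTS ods_table_count_check(
    data_date         string DEFAULT CURRENT_DATE() COMMENT 'dt',
    database_name     string,
    table_name        string,
    check_column      string DEFAULT ' ',   -- hypothetical name; the original prefix was lost to the tab
    table_type        string,
    add_count         bigint,
    single_column_sum double DEFAULT -1.0 COMMENT 'sum over a single numeric column; -1.0 when none specified',   -- hypothetical name
    total_count       bigint COMMENT 'total row count of the table'
);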
Note: Tab has a special meaning in the Hive CLI (it triggers autocompletion). Indent with spaces wherever possible and avoid the Tab key.
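If you keep the DDL in a script file, you can also sidestep interactive completion entirely. A sketch, again assuming GNU sed and the hypothetical create_table.sql:

sed -i 's/\t/    /g' create_table.sql    # replace every tab with four spaces
hive -f create_table.sql                 # run the file non-interactively, so Tab completion never fires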