I am not able to remove a column from an existing Hive external table using ALTER TABLE.
The Hive version is 1.1, inside CDH 5.5.
hive> create external table alter_test(id int,name string)
> row format delimited
> fields terminated by ','
> location '/user/cloudera/conf_files';
OK
Time taken: 0.132 seconds
hive> select * from alter_test;
OK
100 surender
101 raja
Time taken: 0.141 seconds, Fetched: 2 row(s)
hive> alter table alter_test ADD COLUMNS (deviceid string,mode string,channels int,action_name string,data_countt int);
OK
Time taken: 0.2 seconds
hive> show create table alter_test;
OK
CREATE EXTERNAL TABLE `alter_test`(
`id` int,
`name` string,
`deviceid` string,
`mode` string,
`channels` int,
`action_name` string,
`data_countt` int)
ROW FORMAT DELIMITED
FIELDS TERMINATED BY ','
STORED AS INPUTFORMAT
'org.apache.hadoop.mapred.TextInputFormat'
OUTPUTFORMAT
'org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat'
LOCATION
'hdfs://nameservice1/user/cloudera/conf_files'
TBLPROPERTIES (
'COLUMN_STATS_ACCURATE'='false',
'last_modified_by'='build',
'last_modified_time'='1500048081',
'numFiles'='0',
'numRows'='-1',
'rawDataSize'='-1',
'totalSize'='0',
'transient_lastDdlTime'='1500048081')
Time taken: 0.049 seconds, Fetched: 25 row(s)
hive> select * from alter_test;
OK
100 surender NULL NULL NULL NULL NULL
101 raja NULL NULL NULL NULL NULL
Time taken: 0.123 seconds, Fetched: 2 row(s)
hive> alter table alter_test drop deviceid;
MismatchedTokenException(26!=187)
at org.antlr.runtime.BaseRecognizer.recoverFromMismatchedToken(BaseRecognizer.java:617)
at org.antlr.runtime.BaseRecognizer.match(BaseRecognizer.java:115)
at org.apache.hadoop.hive.ql.parse.HiveParser_IdentifiersParser.dropPartitionSpec(HiveParser_IdentifiersParser.java:10571)
at org.apache.hadoop.hive.ql.parse.HiveParser.dropPartitionSpec(HiveParser.java:44608)
at org.apache.hadoop.hive.ql.parse.HiveParser.alterStatementSuffixDropPartitions(HiveParser.java:11198)
at org.apache.hadoop.hive.ql.parse.HiveParser.alterTableStatementSuffix(HiveParser.java:7748)
at org.apache.hadoop.hive.ql.parse.HiveParser.alterStatement(HiveParser.java:6960)
at org.apache.hadoop.hive.ql.parse.HiveParser.ddlStatement(HiveParser.java:2409)
at org.apache.hadoop.hive.ql.parse.HiveParser.execStatement(HiveParser.java:1586)
at org.apache.hadoop.hive.ql.parse.HiveParser.statement(HiveParser.java:1062)
at org.apache.hadoop.hive.ql.parse.ParseDriver.parse(ParseDriver.java:199)
at org.apache.hadoop.hive.ql.parse.ParseDriver.parse(ParseDriver.java:166)
at org.apache.hadoop.hive.ql.Driver.compile(Driver.java:393)
at org.apache.hadoop.hive.ql.Driver.compile(Driver.java:305)
at org.apache.hadoop.hive.ql.Driver.compileInternal(Driver.java:1110)
at org.apache.hadoop.hive.ql.Driver.runInternal(Driver.java:1158)
at org.apache.hadoop.hive.ql.Driver.run(Driver.java:1047)
at org.apache.hadoop.hive.ql.Driver.run(Driver.java:1037)
at org.apache.hadoop.hive.cli.CliDriver.processLocalCmd(CliDriver.java:207)
at org.apache.hadoop.hive.cli.CliDriver.processCmd(CliDriver.java:159)
at org.apache.hadoop.hive.cli.CliDriver.processLine(CliDriver.java:370)
at org.apache.hadoop.hive.cli.CliDriver.executeDriver(CliDriver.java:756)
at org.apache.hadoop.hive.cli.CliDriver.run(CliDriver.java:675)
at org.apache.hadoop.hive.cli.CliDriver.main(CliDriver.java:615)
at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:57)
at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
at java.lang.reflect.Method.invoke(Method.java:606)
at org.apache.hadoop.util.RunJar.run(RunJar.java:221)
at org.apache.hadoop.util.RunJar.main(RunJar.java:136)
FAILED: ParseException line 1:28 mismatched input 'deviceid' expecting PARTITION near 'drop' in drop partition statement
hive> alter table alter_test drop column deviceid;
MismatchedTokenException(57!=187)
at org.antlr.runtime.BaseRecognizer.recoverFromMismatchedToken(BaseRecognizer.java:617)
at org.antlr.runtime.BaseRecognizer.match(BaseRecognizer.java:115)
at org.apache.hadoop.hive.ql.parse.HiveParser_IdentifiersParser.dropPartitionSpec(HiveParser_IdentifiersParser.java:10571)
at org.apache.hadoop.hive.ql.parse.HiveParser.dropPartitionSpec(HiveParser.java:44608)
at org.apache.hadoop.hive.ql.parse.HiveParser.alterStatementSuffixDropPartitions(HiveParser.java:11198)
at org.apache.hadoop.hive.ql.parse.HiveParser.alterTableStatementSuffix(HiveParser.java:7748)
at org.apache.hadoop.hive.ql.parse.HiveParser.alterStatement(HiveParser.java:6960)
at org.apache.hadoop.hive.ql.parse.HiveParser.ddlStatement(HiveParser.java:2409)
at org.apache.hadoop.hive.ql.parse.HiveParser.execStatement(HiveParser.java:1586)
at org.apache.hadoop.hive.ql.parse.HiveParser.statement(HiveParser.java:1062)
at org.apache.hadoop.hive.ql.parse.ParseDriver.parse(ParseDriver.java:199)
at org.apache.hadoop.hive.ql.parse.ParseDriver.parse(ParseDriver.java:166)
at org.apache.hadoop.hive.ql.Driver.compile(Driver.java:393)
at org.apache.hadoop.hive.ql.Driver.compile(Driver.java:305)
at org.apache.hadoop.hive.ql.Driver.compileInternal(Driver.java:1110)
at org.apache.hadoop.hive.ql.Driver.runInternal(Driver.java:1158)
at org.apache.hadoop.hive.ql.Driver.run(Driver.java:1047)
at org.apache.hadoop.hive.ql.Driver.run(Driver.java:1037)
at org.apache.hadoop.hive.cli.CliDriver.processLocalCmd(CliDriver.java:207)
at org.apache.hadoop.hive.cli.CliDriver.processCmd(CliDriver.java:159)
at org.apache.hadoop.hive.cli.CliDriver.processLine(CliDriver.java:370)
at org.apache.hadoop.hive.cli.CliDriver.executeDriver(CliDriver.java:756)
at org.apache.hadoop.hive.cli.CliDriver.run(CliDriver.java:675)
at org.apache.hadoop.hive.cli.CliDriver.main(CliDriver.java:615)
at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:57)
at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
at java.lang.reflect.Method.invoke(Method.java:606)
at org.apache.hadoop.util.RunJar.run(RunJar.java:221)
at org.apache.hadoop.util.RunJar.main(RunJar.java:136)
FAILED: ParseException line 1:28 mismatched input 'column' expecting PARTITION near 'drop' in drop partition statement
Is there any workaround for this issue?
There is currently no command in Hive for dropping a column.
A column could be dropped implicitly by defining a new set of columns -
alter table alter_test
replace columns
(id int,name string,mode string,channels int,action_name string,data_countt int)
;
However, in your case it raises an exception -
Unable to alter table.
The following columns have types incompatible
with the existing columns in their respective positions :
channels,data_countt,
Query: alter table alter_test replace columns
(id int,name string,mode string,channels int,action_name
string,data_countt int).
We can work around it by doing it in 2 phases -
1.
alter table alter_test
replace columns
(id int,name string)
;
2.
alter table alter_test
add columns
(mode string,channels int,action_name string,data_countt int)
;
P.s.
Just to make it clear -
All the changes are done at the metadata level only.
The data itself is not changed.
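As a quick check (a hedged sketch, assuming the same table and location as above), you can list the external location and re-query the table; REPLACE/ADD COLUMNS does not rewrite the delimited files, it only changes the metastore schema:
-- the files under the external location are left untouched
dfs -ls /user/cloudera/conf_files;
select * from alter_test;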
P.s. 2
And of course you can drop the external table and recreate it...
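A minimal sketch of that approach (assuming you want to drop only deviceid; because the table is EXTERNAL, dropping it leaves the files under /user/cloudera/conf_files in place):
drop table alter_test;
create external table alter_test(id int, name string, mode string, channels int, action_name string, data_countt int)
row format delimited
fields terminated by ','
location '/user/cloudera/conf_files';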
It's better to create a new table a, then insert into table a from the old table, after this drop the old table, and finally rename a to the old table name.
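A hedged sketch of that approach (alter_test_new is a placeholder name; note that dropping the original external table does not delete its files, and the renamed copy keeps its own storage location):
-- new table without the unwanted deviceid column
create table alter_test_new(id int, name string, mode string, channels int, action_name string, data_countt int)
row format delimited
fields terminated by ',';

-- copy the surviving columns across
insert into table alter_test_new
select id, name, mode, channels, action_name, data_countt from alter_test;

-- swap the tables
drop table alter_test;
alter table alter_test_new rename to alter_test;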
Related
I have a Hive external table partitioned on dt (string).
Some values in the partition column are null, which means those rows go into the default null partition (dt=__HIVE_DEFAULT_PARTITION__). I want to update the location of this partition.
0: jdbc:hive2://localhost:10000/> alter table `zdb.table` partition(dt=__HIVE_DEFAULT_PARTITION__) set location "s3a://path/zdb.db/table/dt=__HIVE_DEFAULT_PARTITION__";
Error: Error while compiling statement: FAILED: ParseException line 1:71 missing \' at ')' near '<EOF>' (state=42000,code=40000)
0: jdbc:hive2://localhost:10000/> alter table `zdb.table` partition(dt=null) set location "s3a://path/zdb.db/table/dt=__HIVE_DEFAULT_PARTITION__";
Error: Error while compiling statement: FAILED: SemanticException [Error 10248]: Cannot add partition column dt of type void as it cannot be converted to type string (state=42000,code=10248)
0: jdbc:hive2://localhost:10000/> alter table `zdb.table` partition(dt="__HIVE_DEFAULT_PARTITION__") set location "s3a://path/zdb.db/table/dt=__HIVE_DEFAULT_PARTITION__";
Error: org.apache.hive.service.cli.HiveSQLException: Error while processing statement: FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.exec.DDLTask. Unable to alter partition. Unable to alter partitions because table or database does not exist.
at org.apache.hive.service.cli.operation.Operation.toSQLException(Operation.java:380)
at org.apache.hive.service.cli.operation.SQLOperation.runQuery(SQLOperation.java:257)
at org.apache.hive.service.cli.operation.SQLOperation.access$800(SQLOperation.java:91)
at org.apache.hive.service.cli.operation.SQLOperation$BackgroundWork$1.run(SQLOperation.java:348)
at java.security.AccessController.doPrivileged(Native Method)
at javax.security.auth.Subject.doAs(Subject.java:422)
at org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java:1844)
at org.apache.hive.service.cli.operation.SQLOperation$BackgroundWork.run(SQLOperation.java:363)
at java.util.concurrent.Executors$RunnableAdapter.call(Executors.java:511)
at java.util.concurrent.FutureTask.run(FutureTask.java:266)
at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1149)
at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624)
at java.lang.Thread.run(Thread.java:748)
Caused by: org.apache.hadoop.hive.ql.metadata.HiveException: Unable to alter partition. Unable to alter partitions because table or database does not exist.
at org.apache.hadoop.hive.ql.metadata.Hive.alterPartitions(Hive.java:743)
at org.apache.hadoop.hive.ql.exec.DDLTask.alterTable(DDLTask.java:3592)
at org.apache.hadoop.hive.ql.exec.DDLTask.execute(DDLTask.java:390)
at org.apache.hadoop.hive.ql.exec.Task.executeTask(Task.java:199)
at org.apache.hadoop.hive.ql.exec.TaskRunner.runSequential(TaskRunner.java:100)
at org.apache.hadoop.hive.ql.Driver.launchTask(Driver.java:2183)
at org.apache.hadoop.hive.ql.Driver.execute(Driver.java:1839)
at org.apache.hadoop.hive.ql.Driver.runInternal(Driver.java:1526)
at org.apache.hadoop.hive.ql.Driver.run(Driver.java:1237)
at org.apache.hadoop.hive.ql.Driver.run(Driver.java:1232)
at org.apache.hive.service.cli.operation.SQLOperation.runQuery(SQLOperation.java:255)
... 11 more
Caused by: InvalidOperationException(message:Unable to alter partitions because table or database does not exist.)
at org.apache.hadoop.hive.metastore.api.ThriftHiveMetastore$alter_partitions_with_environment_context_result$alter_partitions_with_environment_context_resultStandardScheme.read(ThriftHiveMetastore.java)
at org.apache.hadoop.hive.metastore.api.ThriftHiveMetastore$alter_partitions_with_environment_context_result$alter_partitions_with_environment_context_resultStandardScheme.read(ThriftHiveMetastore.java)
at org.apache.hadoop.hive.metastore.api.ThriftHiveMetastore$alter_partitions_with_environment_context_result.read(ThriftHiveMetastore.java)
at org.apache.thrift.TServiceClient.receiveBase(TServiceClient.java:86)
at org.apache.hadoop.hive.metastore.api.ThriftHiveMetastore$Client.recv_alter_partitions_with_environment_context(ThriftHiveMetastore.java:2843)
at org.apache.hadoop.hive.metastore.api.ThriftHiveMetastore$Client.alter_partitions_with_environment_context(ThriftHiveMetastore.java:2827)
at org.apache.hadoop.hive.metastore.HiveMetaStoreClient.alter_partitions(HiveMetaStoreClient.java:1533)
at sun.reflect.GeneratedMethodAccessor166.invoke(Unknown Source)
at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
at java.lang.reflect.Method.invoke(Method.java:498)
at org.apache.hadoop.hive.metastore.RetryingMetaStoreClient.invoke(RetryingMetaStoreClient.java:173)
at com.sun.proxy.$Proxy34.alter_partitions(Unknown Source)
at sun.reflect.GeneratedMethodAccessor166.invoke(Unknown Source)
at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
at java.lang.reflect.Method.invoke(Method.java:498)
at org.apache.hadoop.hive.metastore.HiveMetaStoreClient$SynchronizedHandler.invoke(HiveMetaStoreClient.java:2336)
at com.sun.proxy.$Proxy34.alter_partitions(Unknown Source)
at org.apache.hadoop.hive.ql.metadata.Hive.alterPartitions(Hive.java:739)
... 21 more (state=08S01,code=1)
Also, I noticed that drop partition works:
0: jdbc:hive2://localhost:10000/> alter table `zdb.table` drop partition(dt="__HIVE_DEFAULT_PARTITION__") ;
No rows affected (0.08 seconds)
Using the same syntax with add partition fails
0: jdbc:hive2://localhost:10000/> alter table `zdb.table` add partition(dt="__HIVE_DEFAULT_PARTITION__") location "s3a://path/zdb.db/table/dt=__HIVE_DEFAULT_PARTITION__";
Error: Error while compiling statement: FAILED: SemanticException [Error 10111]: Partition value contains a reserved substring (User value: __HIVE_DEFAULT_PARTITION__ Reserved substring: __HIVE_DEFAULT_PARTITION__) (state=42000,code=10111)
For my use case, I cannot use msck repair table <table-name>. Please suggest a workaround for this.
Enclose __HIVE_DEFAULT_PARTITION__ in double quotes (""); then you are able to set the location for the default partition.
alter table `zdb.table` partition(dt="__HIVE_DEFAULT_PARTITION__") set location "s3a://path/zdb.db/table/dt=__HIVE_DEFAULT_PARTITION__";
We can't add __HIVE_DEFAULT_PARTITION__ to the Hive table directly (as it is a reserved keyword in Hive), but we can solve this issue with a workaround.
INSERT INTO zdb.table PARTITION(DT) SELECT * FROM (SELECT <all columns except dt>,CAST(NULL AS STRING) DT )T;
Now __HIVE_DEFAULT_PARTITION__ will exist, and we can use alter ... set location to change the partition location.
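Put together, a minimal sketch of the workaround (col1 and col2 are placeholders for the table's real non-partition columns):
-- 1. Materialize the default partition by writing at least one row with a NULL dt
--    (dynamic partitioning may require: set hive.exec.dynamic.partition.mode=nonstrict;)
INSERT INTO zdb.table PARTITION (dt)
SELECT col1, col2, CAST(NULL AS STRING) AS dt
FROM zdb.table LIMIT 1;

-- 2. The partition dt=__HIVE_DEFAULT_PARTITION__ now exists in the metastore,
--    so its location can be changed
ALTER TABLE `zdb.table` PARTITION (dt="__HIVE_DEFAULT_PARTITION__")
SET LOCATION "s3a://path/zdb.db/table/dt=__HIVE_DEFAULT_PARTITION__";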
I have only one table, student1, and its definition and schema are as follows:
hive>
create table student1(S_Id int,
> S_name Varchar(100),
> Address Struct<a:int, b:String, c:int>,
> marks Map<String, Int>);
OK
Time taken: 0.439 seconds
hive>
hive> Describe Student1;
OK
s_id int
s_name varchar(100)
address struct<a:int,b:string,c:int>
marks map<string,int>
Time taken: 0.112 seconds, Fetched: 4 row(s)
Now I am trying to insert values into that student1 table like this:
hive> insert into table student1 values(1, 'Afzal', Struct(42, 'nelson Ave NY', 08309),MAP("MATH", 89));
I am getting this error:
FAILED: SemanticException [Error 10293]: Unable to create temp file for insert values Expression of type TOK_FUNCTION not supported in insert/values
How do I insert values for one record in one go? Can anyone please help me?
It works when using an insert ... select statement. Create a dummy table with a single row, or use some existing table and add limit 1. Also use the named_struct function:
Demo:
hive> insert into table student1
select 1 s_id,
'Afzal' s_name,
named_struct('a',42, 'b','nelson Ave NY', 'c',08309) address,
MAP('MATH', 89) marks
from default.dual limit 1; --this is a dummy table
Loading data to table dev.student1
Table dev.student1 stats: [numFiles=1, numRows=1, totalSize=48, rawDataSize=37]
OK
Time taken: 27.175 seconds
Check data:
hive> select * from student1;
OK
1 Afzal {"a":42,"b":"nelson Ave NY","c":8309} {"MATH":89}
Time taken: 0.125 seconds, Fetched: 1 row(s)
I created the following table in hive.
CREATE TABLE IF NOT EXISTS employee (
eid int,
name String,
salary String,
destination String
)
COMMENT 'Employee details'
ROW FORMAT DELIMITED
FIELDS TERMINATED BY '\t'
LINES TERMINATED BY '\n'
STORED AS TEXTFILE;
The table is created successfully. I am trying to do the following insert
insert into TABLE employee (eid,name,salary,destination) VALUES (1,'avi','100000','boston');
However, I am getting the following error messages.
NoViableAltException(283#[])
at org.apache.hadoop.hive.ql.parse.HiveParser.regularBody(HiveParser.java:39678)
at org.apache.hadoop.hive.ql.parse.HiveParser.queryStatementExpressionBody(HiveParser.java:38904)
at org.apache.hadoop.hive.ql.parse.HiveParser.queryStatementExpression(HiveParser.java:38780)
at org.apache.hadoop.hive.ql.parse.HiveParser.execStatement(HiveParser.java:1514)
at org.apache.hadoop.hive.ql.parse.HiveParser.statement(HiveParser.java:1052)
at org.apache.hadoop.hive.ql.parse.ParseDriver.parse(ParseDriver.java:199)
at org.apache.hadoop.hive.ql.parse.ParseDriver.parse(ParseDriver.java:166)
at org.apache.hadoop.hive.ql.Driver.compile(Driver.java:389)
at org.apache.hadoop.hive.ql.Driver.compile(Driver.java:303)
at org.apache.hadoop.hive.ql.Driver.compileInternal(Driver.java:1067)
at org.apache.hadoop.hive.ql.Driver.runInternal(Driver.java:1129)
at org.apache.hadoop.hive.ql.Driver.run(Driver.java:1004)
at org.apache.hadoop.hive.ql.Driver.run(Driver.java:994)
at org.apache.hadoop.hive.cli.CliDriver.processLocalCmd(CliDriver.java:201)
at org.apache.hadoop.hive.cli.CliDriver.processCmd(CliDriver.java:153)
at org.apache.hadoop.hive.cli.CliDriver.processLine(CliDriver.java:364)
at org.apache.hadoop.hive.cli.CliDriver.executeDriver(CliDriver.java:712)
at org.apache.hadoop.hive.cli.CliDriver.run(CliDriver.java:631)
at org.apache.hadoop.hive.cli.CliDriver.main(CliDriver.java:570)
at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
at java.lang.reflect.Method.invoke(Method.java:498)
at org.apache.hadoop.util.RunJar.run(RunJar.java:221)
at org.apache.hadoop.util.RunJar.main(RunJar.java:136)
FAILED: ParseException line 1:27 cannot recognize input near '(' 'eid' ',' in statement
How can I fix this?
INSERT INTO employee select 1, 'avi', '100000', 'boston';
It seems your version supports neither an insert column list nor INSERT ... VALUES.
P.s.
The error message is very clear.
You should focus on the last line.
Remember you need quotation marks as you are trying to insert Strings. Also, the field names are not necessary as you are inserting a value for all fields in your table. You might simply try the following:
INSERT INTO TABLE employee
VALUES (1, 'avi', '100000', 'boston');
You can't insert the data like this, because that form of the insert command is used in traditional SQL databases.
To load data into Hive, you have to create a text file and upload that file using the LOAD command.
For example
LOAD DATA LOCAL INPATH 'YOUR TXT FILE LOCATION' OVERWRITE INTO TABLE TABLENAME;
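For instance (a hedged sketch; the local path is a placeholder), the employee table above is tab-delimited, so the text file holds one row per line with tab-separated fields, and is then loaded:
-- /tmp/employee.txt, fields separated by tabs (matching FIELDS TERMINATED BY '\t'):
-- 1<TAB>avi<TAB>100000<TAB>boston
LOAD DATA LOCAL INPATH '/tmp/employee.txt' OVERWRITE INTO TABLE employee;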
I have created the following table in hive:
hive> CREATE TABLE IF NOT EXISTS Sensorreading ( recvtime String, nodeid int, sensorid int, systemid int, value float);
OK
Time taken: 3.007 seconds
hive> describe Sensorreading;
OK
recvtime string
nodeid int
sensorid int
systemid int
value float
Time taken: 0.381 seconds
hive>
And now I need to insert data into it. I have tried this, but it doesn't work:
INSERT INTO TABLE Sensorreading (recvtime, nodeid, sensorid, systemid, value) VALUES ('2015-05-29 11:10:00',1,1,1,-45.4);
What is the correct syntax for INSERT? Thanks.
INSERT...VALUES is available starting in Hive 0.14.
Check if your Hive version is 0.14 or later.
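If it turns out to be older than 0.14, a hedged fallback (some_existing_table is a placeholder for any table that already contains at least one row) is INSERT ... SELECT:
-- the literals match the Sensorreading columns: recvtime, nodeid, sensorid, systemid, value
INSERT INTO TABLE Sensorreading
SELECT '2015-05-29 11:10:00', 1, 1, 1, CAST(-45.4 AS FLOAT)
FROM some_existing_table LIMIT 1;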
INSERT is possible in Hive 0.14. If you need to insert something in an earlier version, there are two ways to do it (manual methods, not any particular command):
1. You can load the data from a text file (after adding your rows to it), as sketched after this list.
2. You can copy the part file to local, make the changes there, and then move it back to the table's regular path.
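A minimal sketch of the first method (the local path is a placeholder; the table was created with Hive's default delimiters, so fields are separated by Ctrl-A / \001):
-- /tmp/sensor.txt: one line per row, fields separated by the default ^A delimiter, e.g.
-- 2015-05-29 11:10:00^A1^A1^A1^A-45.4
LOAD DATA LOCAL INPATH '/tmp/sensor.txt' INTO TABLE Sensorreading;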
I have the following:
hive> CREATE TABLE foo (bar timestamp) STORED AS ORC;
OK
Time taken: 0.041 seconds
hive> INSERT INTO TABLE foo VALUES ('2014-01-17 00:17:13');
NoViableAltException(26#[])
at org.apache.hadoop.hive.ql.parse.HiveParser_SelectClauseParser.selectClause(HiveParser_SelectClauseParser.java:742)
at org.apache.hadoop.hive.ql.parse.HiveParser.selectClause(HiveParser.java:40184)
at org.apache.hadoop.hive.ql.parse.HiveParser.singleSelectStatement(HiveParser.java:38048)
at org.apache.hadoop.hive.ql.parse.HiveParser.selectStatement(HiveParser.java:37754)
at org.apache.hadoop.hive.ql.parse.HiveParser.regularBody(HiveParser.java:37654)
at org.apache.hadoop.hive.ql.parse.HiveParser.queryStatementExpressionBody(HiveParser.java:36898)
at org.apache.hadoop.hive.ql.parse.HiveParser.queryStatementExpression(HiveParser.java:36774)
at org.apache.hadoop.hive.ql.parse.HiveParser.execStatement(HiveParser.java:1338)
at org.apache.hadoop.hive.ql.parse.HiveParser.statement(HiveParser.java:1036)
at org.apache.hadoop.hive.ql.parse.ParseDriver.parse(ParseDriver.java:199)
at org.apache.hadoop.hive.ql.parse.ParseDriver.parse(ParseDriver.java:166)
at org.apache.hadoop.hive.ql.Driver.compile(Driver.java:408)
at org.apache.hadoop.hive.ql.Driver.compile(Driver.java:322)
at org.apache.hadoop.hive.ql.Driver.compileInternal(Driver.java:976)
at org.apache.hadoop.hive.ql.Driver.runInternal(Driver.java:1041)
at org.apache.hadoop.hive.ql.Driver.run(Driver.java:912)
at org.apache.hadoop.hive.ql.Driver.run(Driver.java:902)
at org.apache.hadoop.hive.cli.CliDriver.processLocalCmd(CliDriver.java:268)
at org.apache.hadoop.hive.cli.CliDriver.processCmd(CliDriver.java:220)
at org.apache.hadoop.hive.cli.CliDriver.processLine(CliDriver.java:423)
at org.apache.hadoop.hive.cli.CliDriver.executeDriver(CliDriver.java:793)
at org.apache.hadoop.hive.cli.CliDriver.run(CliDriver.java:686)
at org.apache.hadoop.hive.cli.CliDriver.main(CliDriver.java:625)
at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:57)
at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
at java.lang.reflect.Method.invoke(Method.java:606)
at org.apache.hadoop.util.RunJar.main(RunJar.java:212)
FAILED: ParseException line 1:22 cannot recognize input near 'values' '(' ''2014-01-17 00:17:13'' in select clause
What is the correct way of inserting timestamps into a Hive table?
Hive version is: hive-0.13.0
According to the Hive Language Manual, "INSERT...VALUES is available starting in Hive 0.14".
So, you have only two options left:
Insert the timestamp value from an existing table, or call a function to get it (e.g. from_unixtime(unix_timestamp()); see this answer for an example)
Load data from a file.
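For example, a minimal sketch of the first option (some_existing_table is a placeholder for any table that already contains at least one row):
-- cast a string literal to timestamp, or use from_unixtime(unix_timestamp()) for the current time
INSERT INTO TABLE foo
SELECT CAST('2014-01-17 00:17:13' AS timestamp) AS bar
FROM some_existing_table LIMIT 1;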
This kind of writing is correct (note that the table in the FROM clause must already contain at least one row for anything to be inserted):
insert into table foo
select '2014-01-17 00:17:13' as bar from foo;