0: jdbc:hive2://localhost:10000> select * from error_type;
INFO : Compiling command(queryId=hive_20220306113526_62d5507c-8df1-478b-8f9f-4ea1b8601df9): select * from error_type
INFO : Semantic Analysis Completed
INFO : Returning Hive schema: Schema(fieldSchemas:[FieldSchema(name:error_type.id, type:int, comment:null), FieldSchema(name:error_type.content, type:string, comment:null)], properties:null)
INFO : Completed compiling command(queryId=hive_20220306113526_62d5507c-8df1-478b-8f9f-4ea1b8601df9); Time taken: 0.13 seconds
INFO : Executing command(queryId=hive_20220306113526_62d5507c-8df1-478b-8f9f-4ea1b8601df9): select * from error_type
INFO : Completed executing command(queryId=hive_20220306113526_62d5507c-8df1-478b-8f9f-4ea1b8601df9); Time taken: 0.001 seconds
INFO : OK
Error: java.io.IOException: org.apache.hadoop.hive.ql.metadata.HiveException: java.lang.ClassCastException: org.apache.hadoop.io.FloatWritable cannot be cast to org.apache.hadoop.io.IntWritable (state=,code=0)
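The schema returned by the compiler declares error_type.id as int, but the Writables coming out of the Parquet files are floats, so the object inspector's cast fails. A mismatch of this shape typically appears when the declared column type has diverged from the type the files were physically written with, for example after an ALTER TABLE ... CHANGE COLUMN that only updates the metastore. A minimal, hypothetical reproduction sketch (table and column names mirror the log above; depending on the Hive version, the incompatible type change may first require hive.metastore.disallow.incompatible.col.type.changes=false):

CREATE TABLE error_type (id FLOAT, content STRING) STORED AS PARQUET;
INSERT INTO error_type VALUES (1.5, 'a');   -- the Parquet file now stores id as float

-- Metadata-only change; the existing files are NOT rewritten:
ALTER TABLE error_type CHANGE COLUMN id id INT;

-- The reader now hands a FloatWritable to an int ObjectInspector:
SELECT * FROM error_type;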
Caused by: java.lang.ClassCastException: org.apache.hadoop.io.FloatWritable cannot be cast to org.apache.hadoop.io.IntWritable
    at org.apache.hadoop.hive.serde2.objectinspector.primitive.WritableIntObjectInspector.get(WritableIntObjectInspector.java:36)
    at org.apache.hadoop.hive.ql.io.parquet.write.DataWritableWriter$IntDataWriter.write(DataWritableWriter.java:385)
    at org.apache.hadoop.hive.ql.io.parquet.write.DataWritableWriter$GroupDataWriter.write(DataWritableWriter.java:199)
    at org.apache.hadoop.hive.ql.io.parquet.write.DataWritableWriter$MessageDataWriter.write(DataWritableWriter.java:215)
    at org.apache.hadoop.hive.ql.io.parquet.write.DataWritableWriter.write(DataWritableWriter.java:88)
    at org.apache.hadoop.hive.ql.io.parquet.write.DataWritableWriteSupport.write(DataWritableWriteSupport.java:60)
    at org.apache.hadoop.hive.ql.io.parquet.write.DataWritableWriteSupport.write(DataWritableWriteSupport.java:32)
    at org.apache.parquet.hadoop.InternalParquetRecordWriter.write(InternalParquetRecordWriter.java:123)
    at org.apache.parquet.hadoop.ParquetRecordWriter.write(ParquetRecordWriter.java:179)
    at org.apache.parquet.hadoop.ParquetRecordWriter.write(ParquetRecordWriter.java:46)
    at org.apache.hadoop.hive.ql.io.parquet.write.ParquetRecordWriterWrapper.write(ParquetRecordWriterWrapper.java:136)
    at org.apache.hadoop.hive.ql.io.parquet.write.ParquetRecordWriterWrapper.write(ParquetRecordWriterWrapper.java:149)
    at org.apache.hadoop.hive.ql.exec.FileSinkOperator.process(FileSinkOperator.java:769)
    at org.apache.hadoop.hive.ql.exec.Operator.forward(Operator.java:882)
    at org.apache.hadoop.hive.ql.exec.SelectOperator.process(SelectOperator.java:95)
    at org.apache.hadoop.hive.ql.exec.Operator.forward(Operator.java:882)
    at org.apache.hadoop.hive.ql.exec.TableScanOperator.process(TableScanOperator.java:130)
    at org.apache.hadoop.hive.ql.exec.MapOperator$MapOpCtx.forward(MapOperator.java:146)
    at org.apache.hadoop.hive.ql.exec.MapOperator.process(MapOperator.java:484)
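Note that this trace runs through TableScanOperator → SelectOperator → FileSinkOperator and into the Parquet writer, i.e. the cast blows up while materializing Parquet output, not merely while scanning. One hedged way to hit this shape is to copy data out of the already-mismatched table, so the bad Writable reaches the writer for the declared int column:

-- Hypothetical sketch: the read side passes the FloatWritable through,
-- and DataWritableWriter$IntDataWriter fails on the declared int column.
CREATE TABLE error_type_copy STORED AS PARQUET AS SELECT * FROM error_type;

The struct inspector below (this excerpt appears to be Hive's StandardStructObjectInspector.getStructFieldData) helps explain why nothing catches the mismatch earlier: it only logs a warning when the row shape looks odd and hands the field value through untyped, so the cast fails later, in WritableIntObjectInspector.get.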
@Override
@SuppressWarnings("unchecked")
public Object getStructFieldData(Object data, StructField fieldRef) {
  if (data == null) {
    return null;
  }
  // We support both List<Object> and Object[]
  // so we have to do differently.
  boolean isArray = data.getClass().isArray();
  if (!isArray && !(data instanceof List)) {
    if (!warned) {
      LOG.warn("Invalid type for struct " + data.getClass());
      LOG.warn("ignoring similar errors.");
      warned = true;
    }
    return data;
  }
  int listSize = (isArray ? ((Object[]) data).length : ((List<Object>) data).size());
  MyField f = (MyField) fieldRef;
  if (fields.size() != listSize && !warned) {
    // TODO: remove this
    warned = true;
    LOG.warn("Trying to access " + fields.size()
        + " fields inside a list of " + listSize + " elements: "
        + (isArray ? Arrays.asList((Object[]) data) : (List<Object>) data));
    LOG.warn("ignoring similar errors.");
  }
  // Look up the requested field by position; the value itself is never
  // type-checked here, so a wrong Writable passes straight through.
  int fieldID = f.getFieldID();
  // (snippet truncated mid-method in the original)
Caused by: java.lang.UnsupportedOperationException: Cannot inspect org.apache.hadoop.io.LongWritable
    at org.apache.hadoop.hive.ql.io.parquet.serde.primitive.ParquetStringInspector.getPrimitiveJavaObject(ParquetStringInspector.java:77)
    at org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorUtils.getLong(PrimitiveObjectInspectorUtils.java:709)
    at org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorConverter$LongConverter.convert(PrimitiveObjectInspectorConverter.java:182)
    at org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorConverters$StructConverter.convert(ObjectInspectorConverters.java:416)
    at org.apache.hadoop.hive.ql.exec.MapOperator$MapOpCtx.readRow(MapOperator.java:126)
    at org.apache.hadoop.hive.ql.exec.MapOperator$MapOpCtx.access$200(MapOperator.java:89)
    at org.apache.hadoop.hive.ql.exec.MapOperator.process(MapOperator.java:483)
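This variant is the same disease with different types: the inspector chosen from the current schema is a string inspector (ParquetStringInspector), but the file's physical type yields a LongWritable, which the inspector refuses to handle. A hedged sketch of one DDL history that produces this shape (names are hypothetical; the exact history can vary, especially for partitioned tables whose partition schemas lag behind the table schema):

CREATE TABLE t (v BIGINT) STORED AS PARQUET;   -- hypothetical table
INSERT INTO t VALUES (42);                     -- the file stores v as int64
ALTER TABLE t CHANGE COLUMN v v STRING;        -- metadata-only change
SELECT * FROM t;  -- ParquetStringInspector receives a LongWritable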
Caused by: java.lang.ClassCastException: org.apache.hadoop.io.FloatWritable cannot be cast to org.apache.hadoop.io.IntWritable
    at org.apache.hadoop.hive.serde2.objectinspector.primitive.WritableIntObjectInspector.get(WritableIntObjectInspector.java:36)
    at org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorUtils.getDouble(PrimitiveObjectInspectorUtils.java:755)
    at org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorUtils.getFloat(PrimitiveObjectInspectorUtils.java:796)
    at org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorConverter$FloatConverter.convert(PrimitiveObjectInspectorConverter.java:211)
    at org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorConverters$StructConverter.convert(ObjectInspectorConverters.java:416)
    at org.apache.hadoop.hive.ql.exec.MapOperator$MapOpCtx.readRow(MapOperator.java:126)
    at org.apache.hadoop.hive.ql.exec.MapOperator$MapOpCtx.access$200(MapOperator.java:89)
    at org.apache.hadoop.hive.ql.exec.MapOperator.process(MapOperator.java:483)
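All of these traces reduce to the same invariant violation: the type recorded in the metastore no longer matches the type the Parquet files were written with. Since ALTER TABLE only edits metadata, the usual way out is to first point the declared type back at what the files actually contain, and only then rewrite the data if a different type is really wanted. A sketch, with names mirroring the hypothetical examples above:

-- Step 1: make reads work again by restoring the original type:
ALTER TABLE error_type CHANGE COLUMN id id FLOAT;

-- Step 2 (optional): rewrite into the desired schema with an explicit cast:
CREATE TABLE error_type_fixed (id INT, content STRING) STORED AS PARQUET;
INSERT INTO error_type_fixed SELECT CAST(id AS INT), content FROM error_type;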