java.lang.NoSuchMethodError: org.codehaus.jackson.JsonFactory.enable(Lorg/codehaus/jackson/JsonParser$Feature;

Problem description · votes: 0 · answers: 2

I get the following error when running Java code from Eclipse (I am not using Maven):

Exception in thread "main" java.lang.NoSuchMethodError: org.codehaus.jackson.JsonFactory.enable(Lorg/codehaus/jackson/JsonParser$Feature;)Lorg/codehaus/jackson/JsonFactory;
    at org.apache.avro.Schema.<clinit>(Schema.java:88)
    at org.apache.avro.Schema$Parser.parse(Schema.java:997)
    at com.rishav.avro.AvroExampleWithoutCodeGeneration.serialize(AvroExampleWithoutCodeGeneration.java:36)
    at com.rishav.avro.AvroExampleWithoutCodeGeneration.main(AvroExampleWithoutCodeGeneration.java:94)
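
A NoSuchMethodError at runtime almost always means the calling code was compiled against one version of a library but an older version is on the runtime classpath: the descriptor in the message corresponds to the method JsonFactory enable(JsonParser.Feature), which Avro 1.8.2's Schema class expects but which is missing from the Jackson jar actually being loaded. A quick way to check which jar JsonFactory really comes from (a minimal diagnostic sketch, not part of the original program):

import org.codehaus.jackson.JsonFactory;

public class WhichJar {
    public static void main(String[] args) {
        // Prints the jar (or directory) the JVM actually loaded JsonFactory from;
        // compare it against the Jackson version Avro 1.8.2 was built with.
        System.out.println(
                JsonFactory.class.getProtectionDomain().getCodeSource().getLocation());
    }
}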

These are the jars on my classpath:

avro-1.8.2.jar
java-jason.jar
jason-simple-1.1.1.jar
org.apache.sling.commons.json-2.0.6-sources.jar
org.apache.sling.launchpad-9
jackson-core-asl-1.1.0.jar
jackson-mapper-asl-1.1.0.jar

Line 36 --> Schema schema = new Schema.Parser().parse(new File("StudentActivity.avsc"));

package com.rishav.avro;

import java.io.File;
import java.io.FileInputStream;
import java.io.IOException;
import java.io.InputStream;
import java.util.Iterator;
import java.util.LinkedHashMap;

import org.apache.avro.Schema;
import org.apache.avro.file.DataFileReader;
import org.apache.avro.file.DataFileWriter;
import org.apache.avro.generic.GenericData;
import org.apache.avro.generic.GenericDatumReader;
import org.apache.avro.generic.GenericDatumWriter;
import org.apache.avro.generic.GenericRecord;
//import org.apache.avro.io.BinaryDecoder;
import org.apache.avro.io.DatumReader;
import org.apache.avro.io.DatumWriter;


import org.codehaus.jackson.JsonFactory;
import org.codehaus.jackson.JsonParseException;
import org.codehaus.jackson.JsonProcessingException;
import org.codehaus.jackson.map.ObjectMapper;
import org.json.simple.JSONObject;



public class AvroExampleWithoutCodeGeneration {
    public void serialize() throws JsonParseException, JsonProcessingException, IOException {

        InputStream in = new FileInputStream("StudentActivity.json");

        // create a schema
        Schema schema = new Schema.Parser().parse(new File("StudentActivity.avsc")); // THIS IS LINE 36
        // create a record to hold json
        GenericRecord AvroRec = new GenericData.Record(schema);
        // create a record to hold course_details 
        GenericRecord CourseRec = new GenericData.Record(schema.getField("course_details").schema());
        // this file will have AVro output data
        File AvroFile = new File("resources/StudentActivity.avro");
        // Create a writer to serialize the record
        DatumWriter<GenericRecord> datumWriter = new GenericDatumWriter<GenericRecord>(schema);              
        DataFileWriter<GenericRecord> dataFileWriter = new DataFileWriter<GenericRecord>(datumWriter);

        dataFileWriter.create(schema, AvroFile);

        // iterate over JSONs present in input file and write to Avro output file
        ObjectMapper mapper = new ObjectMapper();
        // readValues returns an iterator over successive JSON objects in the stream;
        // readValue would bind a single JSONObject, which cannot be cast to Iterator
        Iterator<JSONObject> it = mapper.readValues(new JsonFactory().createJsonParser(in), JSONObject.class);
        while (it.hasNext())
        {

            JSONObject JsonRec = (JSONObject) it.next();
            AvroRec.put("id", JsonRec.get("id"));
            AvroRec.put("student_id", JsonRec.get("student_id"));
            AvroRec.put("university_id", JsonRec.get("university_id"));

            LinkedHashMap CourseDetails = (LinkedHashMap) JsonRec.get("course_details");
            CourseRec.put("course_id", CourseDetails.get("course_id"));
            CourseRec.put("enroll_date", CourseDetails.get("enroll_date"));
            CourseRec.put("verb", CourseDetails.get("verb"));
            CourseRec.put("result_score", CourseDetails.get("result_score"));

            AvroRec.put("course_details", CourseRec);

            dataFileWriter.append(AvroRec);
        }  // end of while loop

        in.close();
        dataFileWriter.close();

    } // end of serialize method

    public void deserialize () throws IOException {
        // create a schema
        Schema schema = new Schema.Parser().parse(new File("resources/StudentActivity.avsc"));
        // create a record using schema
        GenericRecord AvroRec = new GenericData.Record(schema);
        File AvroFile = new File("resources/StudentActivity.avro");
        DatumReader<GenericRecord> datumReader = new GenericDatumReader<GenericRecord>(schema);
        DataFileReader<GenericRecord> dataFileReader = new DataFileReader<GenericRecord>(AvroFile, datumReader);
        System.out.println("Deserialized data is :");
        while (dataFileReader.hasNext()) {
            AvroRec = dataFileReader.next(AvroRec);
            System.out.println(AvroRec);
        }
    }

    public static void main(String[] args) throws JsonParseException, JsonProcessingException, IOException {
        AvroExampleWithoutCodeGeneration AvroEx = new AvroExampleWithoutCodeGeneration();
        AvroEx.serialize();
        AvroEx.deserialize();
    }

}
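
For reference, the code above assumes StudentActivity.avsc defines a record with the four top-level fields plus a nested course_details record. A minimal schema consistent with those field names might look like this (a sketch only; the actual schema file is not shown in the question, so all the types below are assumptions):

{
  "type": "record",
  "name": "StudentActivity",
  "namespace": "com.rishav.avro",
  "fields": [
    {"name": "id", "type": "string"},
    {"name": "student_id", "type": "string"},
    {"name": "university_id", "type": "string"},
    {"name": "course_details", "type": {
      "type": "record",
      "name": "CourseDetails",
      "fields": [
        {"name": "course_id", "type": "string"},
        {"name": "enroll_date", "type": "string"},
        {"name": "verb", "type": "string"},
        {"name": "result_score", "type": "float"}
      ]
    }}
  ]
}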
json apache jackson avro
2 Answers
0 votes

You can use this instead:

Schema schema = new Schema.Parser().parse(new File("resources/StudentActivity.avsc"));

0 votes

Try this. Avro-generated Java classes have a method called getSchema() and no setters, so the Avro-specific methods break the normal operation of Jackson's object mapper and it complains. The following should solve your problem:

import org.apache.avro.specific.SpecificRecordBase;

import com.fasterxml.jackson.annotation.JsonIgnore;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.databind.SerializationFeature;
import com.fasterxml.jackson.datatype.jsr310.JavaTimeModule;

public class AvroJacksonObjectMapper extends ObjectMapper
{
  public AvroJacksonObjectMapper()
  {
    super();
    // JavaTimeModule lives in the separate jackson-datatype-jsr310 artifact
    registerModule(new JavaTimeModule());
    configure(SerializationFeature.WRITE_DATES_AS_TIMESTAMPS, false);
    // Register the mix-in so Jackson ignores the Avro-specific getters
    addMixIn(SpecificRecordBase.class, SpecificRecordBaseMixIn.class);
  }

  public abstract static class SpecificRecordBaseMixIn
  {
    @JsonIgnore
    abstract void getSchema();

    @JsonIgnore
    abstract void getSpecificData();
  }

}

Now use this object mapper to serialize Avro objects to JSON strings and vice versa:

// AvroObject stands for a class generated from the Avro schema

ObjectMapper objectMapper = new AvroJacksonObjectMapper();
AvroObject avroObject = objectMapper.readValue("json string", AvroObject.class);
String jsonString = objectMapper.writeValueAsString(avroObject);
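
The mix-in approach keeps the generated Avro classes untouched: Jackson picks up the @JsonIgnore annotations declared on SpecificRecordBaseMixIn when introspecting SpecificRecordBase and its generated subclasses, so getSchema() and getSpecificData() are no longer treated as bean properties.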