
Commit 57212ff

Fix #275: add test to show usage (#634)
1 parent 487738e commit 57212ff

File tree

2 files changed: +215 -1 lines changed


avro/src/test/java/com/fasterxml/jackson/dataformat/avro/schemaev/EnumEvolutionTest.java

Lines changed: 1 addition & 1 deletion
@@ -33,7 +33,7 @@ protected static class Employee {
         public Gender gender;
     }

-    private final AvroMapper MAPPER = new AvroMapper();
+    private final AvroMapper MAPPER = newMapper();

     @Test
     public void testSimple() throws Exception
avro/src/test/java/com/fasterxml/jackson/dataformat/avro/schemaev/Evolution275Test.java

Lines changed: 214 additions & 0 deletions

@@ -0,0 +1,214 @@
package com.fasterxml.jackson.dataformat.avro.schemaev;

import org.junit.jupiter.api.Test;

import com.fasterxml.jackson.annotation.JsonInclude;
import com.fasterxml.jackson.core.io.JsonEOFException;
import com.fasterxml.jackson.dataformat.avro.*;

import static org.junit.jupiter.api.Assertions.*;

/**
 * Test for issue #275: Avro backward compatibility when adding fields with default values.
 * <p>
 * This test demonstrates that Jackson Avro DOES support backward compatibility correctly,
 * but users MUST use the {@code withReaderSchema()} method to enable schema resolution.
 * <p>
 * The key insight is that Avro binary format does not include schema metadata in the
 * serialized data. Therefore, when reading data that was written with schema A using
 * schema B, the library needs to be explicitly told about both schemas through the
 * {@code withReaderSchema()} API.
 * <p>
 * Common mistake: Trying to read old data with a new schema directly leads to
 * "Unexpected end-of-input" errors because the parser tries to read fields that
 * don't exist in the binary data.
 * <p>
 * Correct usage pattern:
 * <pre>
 * // Write with old schema
 * AvroSchema writerSchema = mapper.schemaFrom(OLD_SCHEMA_JSON);
 * byte[] data = mapper.writer(writerSchema).writeValueAsBytes(object);
 *
 * // Read with new schema (that has additional fields with defaults)
 * AvroSchema readerSchema = mapper.schemaFrom(NEW_SCHEMA_JSON);
 * AvroSchema resolved = writerSchema.withReaderSchema(readerSchema);
 * MyObject result = mapper.readerFor(MyObject.class)
 *     .with(resolved) // Use resolved schema, not readerSchema directly!
 *     .readValue(data);
 * </pre>
 */
public class Evolution275Test extends AvroTestBase
{
    // Original schema with 8 fields (simulating the issue scenario)
    static String SCHEMA_V1_JSON = aposToQuotes("{\n"+
            " 'type':'record',\n"+
            " 'name':'Employee',\n"+
            " 'fields':[\n"+
            " { 'name':'code', 'type':'string' },\n"+
            " { 'name':'countryCode', 'type':'string' },\n"+
            " { 'name':'createdBy', 'type':'string' },\n"+
            " { 'name':'createdDate', 'type':'string' },\n"+
            " { 'name':'id', 'type':'long' },\n"+
            " { 'name':'lastModifiedBy', 'type':'string' },\n"+
            " { 'name':'lastModifiedDate', 'type':'string' },\n"+
            " { 'name':'name', 'type':'string' }\n"+
            " ]\n"+
            "}\n");

    // Updated schema adding a 9th field with null default at the end
    static String SCHEMA_V2_JSON = aposToQuotes("{\n"+
            " 'type':'record',\n"+
            " 'name':'Employee',\n"+
            " 'fields':[\n"+
            " { 'name':'code', 'type':'string' },\n"+
            " { 'name':'countryCode', 'type':'string' },\n"+
            " { 'name':'createdBy', 'type':'string' },\n"+
            " { 'name':'createdDate', 'type':'string' },\n"+
            " { 'name':'id', 'type':'long' },\n"+
            " { 'name':'lastModifiedBy', 'type':'string' },\n"+
            " { 'name':'lastModifiedDate', 'type':'string' },\n"+
            " { 'name':'name', 'type':'string' },\n"+
            " { 'name':'phone', 'type':['null', 'string'], 'default':null }\n"+
            " ]\n"+
            "}\n");

    // Simpler test with just 2 fields + new field with null default
    static String SCHEMA_SIMPLE_V1_JSON = aposToQuotes("{\n"+
            " 'type':'record',\n"+
            " 'name':'SimpleRecord',\n"+
            " 'fields':[\n"+
            " { 'name':'id', 'type':'int' },\n"+
            " { 'name':'name', 'type':'string' }\n"+
            " ]\n"+
            "}\n");

    static String SCHEMA_SIMPLE_V2_JSON = aposToQuotes("{\n"+
            " 'type':'record',\n"+
            " 'name':'SimpleRecord',\n"+
            " 'fields':[\n"+
            " { 'name':'id', 'type':'int' },\n"+
            " { 'name':'name', 'type':'string' },\n"+
            " { 'name':'phone', 'type':['null', 'string'], 'default':null }\n"+
            " ]\n"+
            "}\n");

    @JsonInclude(JsonInclude.Include.NON_NULL)
    static class Employee {
        public String code;
        public String countryCode;
        public String createdBy;
        public String createdDate;
        public long id;
        public String lastModifiedBy;
        public String lastModifiedDate;
        public String name;
        public String phone;

        protected Employee() { }

        public Employee(String code, String countryCode, String createdBy,
                String createdDate, long id, String lastModifiedBy,
                String lastModifiedDate, String name) {
            this.code = code;
            this.countryCode = countryCode;
            this.createdBy = createdBy;
            this.createdDate = createdDate;
            this.id = id;
            this.lastModifiedBy = lastModifiedBy;
            this.lastModifiedDate = lastModifiedDate;
            this.name = name;
        }
    }

    @JsonInclude(JsonInclude.Include.NON_NULL)
    static class SimpleRecord {
        public int id;
        public String name;
        public String phone;

        protected SimpleRecord() { }

        public SimpleRecord(int id, String name) {
            this.id = id;
            this.name = name;
        }
    }

    private final AvroMapper MAPPER = newMapper();

    @Test
    public void testSimpleAddNullableFieldWithDefault() throws Exception
    {
        final AvroSchema srcSchema = MAPPER.schemaFrom(SCHEMA_SIMPLE_V1_JSON);
        final AvroSchema dstSchema = MAPPER.schemaFrom(SCHEMA_SIMPLE_V2_JSON);
        final AvroSchema xlate = srcSchema.withReaderSchema(dstSchema);

        // Write data using old schema (without phone field)
        byte[] avro = MAPPER.writer(srcSchema).writeValueAsBytes(new SimpleRecord(1, "Alice"));

        // Read using new schema (with phone field defaulting to null)
        // This should NOT throw "Unexpected end-of-input in FIELD_NAME"
        SimpleRecord result = MAPPER.readerFor(SimpleRecord.class)
                .with(xlate)
                .readValue(avro);

        assertEquals(1, result.id);
        assertEquals("Alice", result.name);
        assertNull(result.phone); // Should use default value
    }

    // This test demonstrates INCORRECT usage: trying to read data serialized with an old schema
    // using a new schema directly, without calling withReaderSchema().
    // This is expected to fail because Avro binary format doesn't include schema metadata,
    // so the reader can't know the data was written with a different schema.
    // Users MUST call withReaderSchema() when reading data written with a different schema.
    @Test
    public void testSimpleAddNullableFieldWithDefaultWrongUsage() throws Exception
    {
        final AvroSchema srcSchema = MAPPER.schemaFrom(SCHEMA_SIMPLE_V1_JSON);
        final AvroSchema dstSchema = MAPPER.schemaFrom(SCHEMA_SIMPLE_V2_JSON);

        // Write data using old schema (without phone field)
        byte[] avro = MAPPER.writer(srcSchema).writeValueAsBytes(new SimpleRecord(1, "Alice"));

        // INCORRECT: Try to read with new schema directly without using withReaderSchema
        // This triggers EOF error because the reader expects to find the phone field in binary data
        // but the data doesn't contain it.
        JsonEOFException thrown = assertThrows(JsonEOFException.class, () -> {
            MAPPER.readerFor(SimpleRecord.class)
                    .with(dstSchema) // Using dstSchema directly instead of xlate
                    .readValue(avro);
        });

        verifyException(thrown, "Unexpected end-of-input in FIELD_NAME");
    }

    @Test
    public void testAddNullableFieldWithDefault() throws Exception
    {
        final AvroSchema srcSchema = MAPPER.schemaFrom(SCHEMA_V1_JSON);
        final AvroSchema dstSchema = MAPPER.schemaFrom(SCHEMA_V2_JSON);
        final AvroSchema xlate = srcSchema.withReaderSchema(dstSchema);

        // Write data using old schema (without phone field)
        Employee emp = new Employee("EMP001", "US", "admin", "2024-01-01",
                123L, "admin", "2024-01-01", "John Doe");
        byte[] avro = MAPPER.writer(srcSchema).writeValueAsBytes(emp);

        // Read using new schema (with phone field defaulting to null)
        // This should NOT throw "Unexpected end-of-input in FIELD_NAME"
        Employee result = MAPPER.readerFor(Employee.class)
                .with(xlate)
                .readValue(avro);

        assertEquals("EMP001", result.code);
        assertEquals("US", result.countryCode);
        assertEquals("admin", result.createdBy);
        assertEquals("2024-01-01", result.createdDate);
        assertEquals(123L, result.id);
        assertEquals("admin", result.lastModifiedBy);
        assertEquals("2024-01-01", result.lastModifiedDate);
        assertEquals("John Doe", result.name);
        assertNull(result.phone); // Should use default value
    }
}
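
For readers arriving from issue #275, here is a minimal, self-contained sketch of the same pattern outside the test harness. It uses only calls the new test already exercises (schemaFrom(), writer().writeValueAsBytes(), withReaderSchema(), readerFor().with().readValue()); the SchemaEvolutionExample and User class names, field names, and schema JSON below are illustrative and are not part of the commit.

import com.fasterxml.jackson.annotation.JsonInclude;
import com.fasterxml.jackson.dataformat.avro.AvroMapper;
import com.fasterxml.jackson.dataformat.avro.AvroSchema;

public class SchemaEvolutionExample
{
    // Old (writer) schema: two fields only
    static final String WRITER_SCHEMA_JSON =
            "{\"type\":\"record\",\"name\":\"User\",\"fields\":["
            + "{\"name\":\"id\",\"type\":\"int\"},"
            + "{\"name\":\"name\",\"type\":\"string\"}]}";

    // New (reader) schema: same record plus a nullable field with a default
    static final String READER_SCHEMA_JSON =
            "{\"type\":\"record\",\"name\":\"User\",\"fields\":["
            + "{\"name\":\"id\",\"type\":\"int\"},"
            + "{\"name\":\"name\",\"type\":\"string\"},"
            + "{\"name\":\"email\",\"type\":[\"null\",\"string\"],\"default\":null}]}";

    // NON_NULL so the (null) email property is not written against the old schema,
    // mirroring the Employee/SimpleRecord beans in the test above
    @JsonInclude(JsonInclude.Include.NON_NULL)
    public static class User {
        public int id;
        public String name;
        public String email; // absent from data written with the old schema

        public User() { }
        public User(int id, String name) { this.id = id; this.name = name; }
    }

    public static void main(String[] args) throws Exception
    {
        AvroMapper mapper = new AvroMapper();

        // Write with the old (writer) schema: the bytes contain no "email" field
        AvroSchema writerSchema = mapper.schemaFrom(WRITER_SCHEMA_JSON);
        byte[] encoded = mapper.writer(writerSchema).writeValueAsBytes(new User(1, "Alice"));

        // Resolve the writer schema against the new (reader) schema before reading
        AvroSchema readerSchema = mapper.schemaFrom(READER_SCHEMA_JSON);
        AvroSchema resolved = writerSchema.withReaderSchema(readerSchema);

        User user = mapper.readerFor(User.class)
                .with(resolved) // use the resolved schema, not readerSchema directly
                .readValue(encoded);

        System.out.println(user.id + " " + user.name + " email=" + user.email); // email is null
    }
}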
