Skip to content

Commit

Permalink
Additional work wrt #35, #38, regarding handling of root-level Array …
Browse files Browse the repository at this point in the history
…values (which now work)
  • Loading branch information
cowtowncoder committed Jan 18, 2017
1 parent af1e155 commit 156d20f
Show file tree
Hide file tree
Showing 5 changed files with 216 additions and 28 deletions.
Original file line number Diff line number Diff line change
Expand Up @@ -90,13 +90,10 @@ public JsonToken nextToken() throws IOException
switch (_state) {
case STATE_START:
_parser.setAvroContext(this);
_index = 0;
_count = _decoder.readArrayStart();
_state = (_count > 0) ? STATE_ELEMENTS : STATE_END;
{
JsonToken t = JsonToken.START_ARRAY;
_currToken = t;
return t;
}
return (_currToken = JsonToken.START_ARRAY);
case STATE_ELEMENTS:
if (_index < _count) {
break;
Expand All @@ -107,13 +104,18 @@ public JsonToken nextToken() throws IOException
}
// otherwise, we are done: fall through
case STATE_END:
_state = STATE_DONE;
_parser.setAvroContext(getParent());
{
JsonToken t = JsonToken.END_ARRAY;
_currToken = t;
return t;
final AvroReadContext parent = getParent();
// as per [dataformats-binary#38], may need to reset, instead of bailing out
if (parent.inRoot()) {
if (!_decoder.isEnd()) {
_index = 0;
_state = STATE_START;
return (_currToken = JsonToken.END_ARRAY);
}
}
_state = STATE_DONE;
_parser.setAvroContext(parent);
return (_currToken = JsonToken.END_ARRAY);
case STATE_DONE:
default:
throwIllegalState(_state);
Expand Down Expand Up @@ -157,11 +159,7 @@ public JsonToken nextToken() throws IOException
_parser.setAvroContext(this);
_count = _decoder.readArrayStart();
_state = (_count > 0) ? STATE_ELEMENTS : STATE_END;
{
JsonToken t = JsonToken.START_ARRAY;
_currToken = t;
return t;
}
return (_currToken = JsonToken.START_ARRAY);
case STATE_ELEMENTS:
if (_index < _count) {
break;
Expand All @@ -172,23 +170,26 @@ public JsonToken nextToken() throws IOException
}
// otherwise, we are done: fall through
case STATE_END:
_state = STATE_DONE;
_parser.setAvroContext(getParent());
{
JsonToken t = JsonToken.END_ARRAY;
_currToken = t;
return t;
final AvroReadContext parent = getParent();
// as per [dataformats-binary#38], may need to reset, instead of bailing out
if (parent.inRoot()) {
if (!_decoder.isEnd()) {
_index = 0;
_state = STATE_START;
return (_currToken = JsonToken.END_ARRAY);
}
}
_state = STATE_DONE;
_parser.setAvroContext(parent);
return (_currToken = JsonToken.END_ARRAY);
case STATE_DONE:
default:
throwIllegalState(_state);
}
++_index;
AvroStructureReader r = _elementReader.newReader(this, _parser, _decoder);
_parser.setAvroContext(r);
JsonToken t = r.nextToken();
_currToken = t;
return t;
return (_currToken = r.nextToken());
}
}
}
Original file line number Diff line number Diff line change
@@ -0,0 +1,138 @@
package com.fasterxml.jackson.dataformat.avro;

import java.io.ByteArrayOutputStream;
import java.util.Arrays;
import java.util.List;

import com.fasterxml.jackson.core.*;

import com.fasterxml.jackson.databind.*;

public class ArrayTest extends AvroTestBase
{
    private final AvroMapper MAPPER = getMapper();

    // Round-trips a single root-level Avro array of Strings, decoding it
    // both as a List and as a String[].
    public void testRootStringArray() throws Exception
    {
        AvroSchema schema = getStringArraySchema();
        List<String> expected = Arrays.asList("foo", "bar");

        byte[] encoded = MAPPER.writer(schema).writeValueAsBytes(expected);

        // First decode as a List:
        List<String> asList = MAPPER.readerFor(List.class)
                .with(schema)
                .readValue(encoded);
        assertNotNull(asList);
        assertEquals(2, asList.size());
        assertEquals(expected.get(0), asList.get(0));
        assertEquals(expected.get(1), asList.get(1));

        // ... and then as a String array:
        String[] asArray = MAPPER.readerFor(String[].class)
                .with(schema)
                .readValue(encoded);
        assertNotNull(asArray);
        assertEquals(2, asArray.length);
        assertEquals(expected.get(0), asArray[0]);
        assertEquals(expected.get(1), asArray[1]);
    }

    // More complex: a sequence of root-level (String) arrays, written with
    // a SequenceWriter and read back one value at a time.
    public void testStringArraySequence() throws Exception
    {
        AvroSchema schema = getStringArraySchema();
        List<String> first = Arrays.asList("foo", "bar");
        List<String> second = Arrays.asList("foobar");
        String[] third = new String[] { "a", "b", "c"};

        // Write a sequence of 3 root-level String arrays
        ByteArrayOutputStream bytes = new ByteArrayOutputStream();
        SequenceWriter seqWriter = MAPPER.writer(schema)
                .writeValues(bytes);
        seqWriter.write(first);
        int sizeAfterFirst = bytes.size();
        seqWriter.write(second);
        // sanity check: second entry must actually have produced output
        if (bytes.size() - sizeAfterFirst == 0) {
            fail("Should have output more bytes for second entry, did not, total: "+sizeAfterFirst);
        }
        seqWriter.write(third);
        seqWriter.close();

        // 18-Jan-2017, tatu: This gets a bit tricky because `readValues()` does not
        //   quite know whether to advance the cursor to START_ARRAY or not; so
        //   prepare the parser manually and use direct binding instead.

        JsonParser p = MAPPER.getFactory().createParser(bytes.toByteArray());
        p.setSchema(schema);

        assertToken(JsonToken.START_ARRAY, p.nextToken());
        _compare(first, MAPPER.readValue(p, List.class));

        assertToken(JsonToken.START_ARRAY, p.nextToken());
        _compare(second, MAPPER.readValue(p, List.class));

        assertToken(JsonToken.START_ARRAY, p.nextToken());
        _compare(Arrays.asList(third), MAPPER.readValue(p, List.class));

        assertNull(p.nextToken());
        p.close();
    }

    // And the ultimate case: a sequence of root-level arrays of records.
    public void testEmployeeArraySequence() throws Exception
    {
        AvroSchema schema = MAPPER.schemaFrom(EMPLOYEE_ARRAY_SCHEMA_JSON);

        Employee boss = new Employee("Bossman", 55, new String[] { "[email protected]" }, null);
        Employee peon1 = new Employee("Worker#1", 24, new String[] { "[email protected]" }, boss);
        Employee peon2 = new Employee("Worker#2", 43, new String[] { "[email protected]" }, boss);

        // Write a sequence of 2 root-level Employee arrays
        ByteArrayOutputStream bytes = new ByteArrayOutputStream();
        SequenceWriter seqWriter = MAPPER.writer(schema)
                .writeValues(bytes);
        seqWriter.write(new Employee[] { boss, peon1, peon2 });
        int sizeAfterFirst = bytes.size();
        seqWriter.write(new Employee[] { peon2, boss });
        // sanity check: second entry must actually have produced output
        if (bytes.size() - sizeAfterFirst == 0) {
            fail("Should have output more bytes for second entry, did not, total: "+sizeAfterFirst);
        }
        seqWriter.close();

        // 18-Jan-2017, tatu: This gets a bit tricky because `readValues()` does not
        //   quite know whether to advance the cursor to START_ARRAY or not; so
        //   prepare the parser manually and use direct binding instead.

        JsonParser p = MAPPER.getFactory().createParser(bytes.toByteArray());
        p.setSchema(schema);

        assertToken(JsonToken.START_ARRAY, p.nextToken());
        Employee[] firstResult = MAPPER.readValue(p, Employee[].class);
        assertEquals(3, firstResult.length);
        assertEquals("Bossman", firstResult[0].name);
        assertEquals("Worker#2", firstResult[2].name);

        assertToken(JsonToken.START_ARRAY, p.nextToken());
        Employee[] secondResult = MAPPER.readValue(p, Employee[].class);
        assertEquals(2, secondResult.length);
        assertEquals("Bossman", secondResult[1].name);

        assertNull(p.nextToken());
        p.close();
    }

    // Helper to verify that decoded contents equal what was written
    private void _compare(List<String> input, List<?> result) {
        assertEquals(input, result);
    }
}
Original file line number Diff line number Diff line change
Expand Up @@ -19,7 +19,7 @@ public abstract class AvroTestBase extends TestCase
/* Test schemas
/**********************************************************
*/

protected final String EMPLOYEE_SCHEMA_JSON = "{\n"
+"\"type\": \"record\",\n"
+"\"name\": \"Employee\",\n"
Expand All @@ -30,6 +30,31 @@ public abstract class AvroTestBase extends TestCase
+" {\"name\": \"boss\", \"type\": [\"Employee\",\"null\"]}\n"
+"]}";

// Avro schema for a root-level array of Strings
protected final String STRING_ARRAY_SCHEMA_JSON = "{\n"
+"\"name\": \"StringArray\",\n"
+"\"type\": \"array\",\n"
+"\"items\": \"string\"\n}";

// Avro schema for a root-level map with String values
protected final String STRING_MAP_SCHEMA_JSON = "{\n"
+"\"name\": \"StringMap\",\n"
+"\"type\": \"map\",\n"
+"\"values\": \"string\"\n}";

// Avro schema for a root-level array of Employee records; written with
// apostrophes (converted via aposToQuotes) to avoid escaped double-quotes
protected final String EMPLOYEE_ARRAY_SCHEMA_JSON = aposToQuotes(
"{"
+"'name': 'EmployeeArray',\n"
+"'type': 'array',\n"
+"'items': {\n"
+" 'type': 'record',\n"
+" 'name': 'Employee',\n"
+" 'fields': [\n"
+" {'name': 'name', 'type': 'string'},\n"
+" {'name': 'age', 'type': 'int'},\n"
+" {'name': 'emails', 'type': {'type': 'array', 'items': 'string'}},\n"
+" {'name': 'boss', 'type': ['Employee','null']}\n"
+" ]}\n"
+"}\n");

/*
/**********************************************************
/* Test classes
Expand Down Expand Up @@ -208,6 +233,14 @@ protected AvroSchema getEmployeeSchema() throws IOException {
return _employeeSchema;
}

// Builds an AvroSchema for a root-level String array from the shared JSON definition.
protected AvroSchema getStringArraySchema() throws IOException {
    AvroSchema schema = getMapper().schemaFrom(STRING_ARRAY_SCHEMA_JSON);
    return schema;
}

// Builds an AvroSchema for a root-level String-valued map from the shared JSON definition.
protected AvroSchema getStringMapSchema() throws IOException {
    AvroSchema schema = getMapper().schemaFrom(STRING_MAP_SCHEMA_JSON);
    return schema;
}

protected AvroMapper getMapper() {
if (_sharedMapper == null) {
_sharedMapper = newMapper();
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -2,7 +2,6 @@

import java.io.IOException;

import com.fasterxml.jackson.dataformat.avro.*;
import com.fasterxml.jackson.dataformat.avro.schema.AvroSchemaGenerator;

public class BinaryDataTest extends AvroTestBase
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -36,7 +36,7 @@ public void setStuff(Map<String,String> arg) {
}
}

public void testSimple() throws Exception
public void testRecordWithMap() throws Exception
{
AvroMapper mapper = getMapper();
AvroSchema schema = mapper.schemaFrom(MAP_SCHEMA_JSON);
Expand Down Expand Up @@ -140,4 +140,21 @@ public void testMapOrNull() throws Exception
assertEquals(1, output.stuff.size());
assertEquals("y", output.stuff.get("x"));
}

// 18-Jan-2017, tatu: It would seem reasonable to support root-level Maps too,
// since Records and Arrays work, but looks like there are some issues
// regarding them so can't yet test

/*
public void testRootStringMap() throws Exception
{
AvroMapper mapper = getMapper();
AvroSchema schema = getStringMapSchema();
Map<String,String> input = new LinkedHashMap<>();
input.put("a", "1");
input.put("b", "2");
byte[] b = mapper.writer(schema).writeValueAsBytes(input);
}
*/
}

0 comments on commit 156d20f

Please sign in to comment.