Remove Jackson usage in azure-storage-internal-avro #41334

Merged: 3 commits merged on Jul 29, 2024

Changes from 2 commits
@@ -72,7 +72,7 @@ public final class StorageCommonTestUtils {
@SuppressWarnings("deprecation")
private static HttpClient createJdkHttpClient() throws ReflectiveOperationException {
Class<?> clazz = Class.forName("com.azure.core.http.jdk.httpclient.JdkHttpClientProvider");
return (HttpClient) clazz.getDeclaredMethod("createInstance").invoke(clazz.newInstance());
return (HttpClient) clazz.getDeclaredMethod("createInstance").invoke(clazz.newInstance());
}

/**
@@ -198,8 +198,8 @@ public static boolean compareFiles(File file1, File file2, long offset, long cou
* @return The instrumented builder.
*/
@SuppressWarnings("unchecked")
public static <T extends HttpTrait<T>, E extends Enum<E>> T instrument(T builder,
HttpLogOptions logOptions, InterceptorManager interceptorManager) {
public static <T extends HttpTrait<T>, E extends Enum<E>> T instrument(T builder, HttpLogOptions logOptions,
InterceptorManager interceptorManager) {
// Groovy style reflection. All our builders follow this pattern.
builder.httpClient(getHttpClient(interceptorManager));

@@ -210,12 +210,11 @@ public static <T extends HttpTrait<T>, E extends Enum<E>> T instrument(T builder
if (ENVIRONMENT.getServiceVersion() != null) {
try {
Method serviceVersionMethod = Arrays.stream(builder.getClass().getDeclaredMethods())
.filter(method -> "serviceVersion".equals(method.getName())
&& method.getParameterCount() == 1
.filter(method -> "serviceVersion".equals(method.getName()) && method.getParameterCount() == 1
&& ServiceVersion.class.isAssignableFrom(method.getParameterTypes()[0]))
.findFirst()
.orElseThrow(() -> new RuntimeException("Unable to find serviceVersion method for builder: "
+ builder.getClass()));
.orElseThrow(() -> new RuntimeException(
"Unable to find serviceVersion method for builder: " + builder.getClass()));
Class<E> serviceVersionClass = (Class<E>) serviceVersionMethod.getParameterTypes()[0];
ServiceVersion serviceVersion = (ServiceVersion) Enum.valueOf(serviceVersionClass,
ENVIRONMENT.getServiceVersion());
@@ -315,8 +314,8 @@ public static TokenCredential getTokenCredential(InterceptorManager interceptorM
} else { //live
Configuration config = Configuration.getGlobalConfiguration();

ChainedTokenCredentialBuilder builder = new ChainedTokenCredentialBuilder()
.addLast(new EnvironmentCredentialBuilder().build())
ChainedTokenCredentialBuilder builder = new ChainedTokenCredentialBuilder().addLast(
new EnvironmentCredentialBuilder().build())
.addLast(new AzureCliCredentialBuilder().build())
.addLast(new AzureDeveloperCliCredentialBuilder().build());

@@ -325,19 +324,18 @@ public static TokenCredential getTokenCredential(InterceptorManager interceptorM
String tenantId = config.get("AZURESUBSCRIPTION_TENANT_ID");
String systemAccessToken = config.get("SYSTEM_ACCESSTOKEN");

if (!CoreUtils.isNullOrEmpty(serviceConnectionId)
&& !CoreUtils.isNullOrEmpty(clientId)
&& !CoreUtils.isNullOrEmpty(tenantId)
&& !CoreUtils.isNullOrEmpty(systemAccessToken)) {
if (!CoreUtils.isNullOrEmpty(serviceConnectionId) && !CoreUtils.isNullOrEmpty(clientId)
&& !CoreUtils.isNullOrEmpty(tenantId) && !CoreUtils.isNullOrEmpty(systemAccessToken)) {

AzurePipelinesCredential pipelinesCredential = new AzurePipelinesCredentialBuilder()
.systemAccessToken(systemAccessToken)
AzurePipelinesCredential pipelinesCredential = new AzurePipelinesCredentialBuilder().systemAccessToken(
systemAccessToken)
.clientId(clientId)
.tenantId(tenantId)
.serviceConnectionId(serviceConnectionId)
.build();

builder.addLast(request -> pipelinesCredential.getToken(request).subscribeOn(Schedulers.boundedElastic()));
builder.addLast(
request -> pipelinesCredential.getToken(request).subscribeOn(Schedulers.boundedElastic()));
}

builder.addLast(new AzurePowerShellCredentialBuilder().build());
@@ -3,19 +3,7 @@
<!-- This file is generated by the /eng/scripts/linting_suppression_generator.py script. -->

<suppressions>
<suppress files="com.azure.storage.internal.avro.implementation.schema.AvroSchema.java" checks="AvoidNestedBlocksCheck" />
<suppress files="com.azure.storage.internal.avro.implementation.schema.AvroType.java" checks="AvoidNestedBlocksCheck" />
<suppress files="com.azure.storage.internal.avro.implementation.AvroReader.java" checks="MissingJavadocMethodCheck" />
<suppress files="com.azure.storage.internal.avro.implementation.AvroConstants.java" checks="MissingJavadocTypeCheck" />
<suppress files="com.azure.storage.internal.avro.implementation.schema.primitive.AvroNullSchema.java" checks="MissingJavadocTypeCheck" />
<suppress files="com.azure.storage.internal.avro.implementation.AvroParser.java" checks="com.azure.tools.checkstyle.checks.EnforceFinalFieldsCheck" />
<suppress files="com.azure.storage.internal.avro.implementation.schema.complex.AvroArraySchema.java" checks="com.azure.tools.checkstyle.checks.EnforceFinalFieldsCheck" />
<suppress files="com.azure.storage.internal.avro.implementation.schema.complex.AvroMapSchema.java" checks="com.azure.tools.checkstyle.checks.EnforceFinalFieldsCheck" />
<suppress files="com.azure.storage.internal.avro.implementation.schema.complex.AvroRecordSchema.java" checks="com.azure.tools.checkstyle.checks.EnforceFinalFieldsCheck" />
<suppress files="com.azure.storage.internal.avro.implementation.schema.file.AvroBlockSchema.java" checks="com.azure.tools.checkstyle.checks.EnforceFinalFieldsCheck" />
<suppress files="com.azure.storage.internal.avro.implementation.schema.AvroCompositeSchema.java" checks="com.azure.tools.checkstyle.checks.JavaDocFormatting" />
<suppress files="com.azure.storage.internal.avro.implementation.schema.AvroSchema.java" checks="com.azure.tools.checkstyle.checks.JavaDocFormatting" />
<suppress files="com.azure.storage.internal.avro.implementation.schema.AvroSimpleSchema.java" checks="com.azure.tools.checkstyle.checks.JavaDocFormatting" />
<suppress files="com.azure.storage.internal.avro.implementation.schema.AvroSchema.java" checks="com.azure.tools.checkstyle.checks.JavadocThrowsChecks" />
<suppress files="com.azure.storage.internal.avro.implementation.schema.AvroType.java" checks="com.azure.tools.checkstyle.checks.JavadocThrowsChecks" />
</suppressions>
21 changes: 0 additions & 21 deletions sdk/storage/azure-storage-internal-avro/pom.xml
@@ -61,12 +61,6 @@
<version>12.26.0</version> <!-- {x-version-update;com.azure:azure-storage-common;current} -->
</dependency>

<dependency>
<groupId>com.fasterxml.jackson.dataformat</groupId>
<artifactId>jackson-dataformat-xml</artifactId>
<version>2.13.5</version> <!-- {x-version-update;com.fasterxml.jackson.dataformat:jackson-dataformat-xml;external_dependency} -->
</dependency>

<dependency>
<groupId>org.junit.jupiter</groupId>
<artifactId>junit-jupiter-api</artifactId>
@@ -179,21 +173,6 @@
</executions>
</plugin>
<!-- END: Empty Java Doc -->

<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-enforcer-plugin</artifactId>
<version>3.4.1</version> <!-- {x-version-update;org.apache.maven.plugins:maven-enforcer-plugin;external_dependency} -->
<configuration>
<rules>
<bannedDependencies>
<includes>
<include>com.fasterxml.jackson.dataformat:jackson-dataformat-xml:[2.13.5]</include> <!-- {x-include-update;com.fasterxml.jackson.dataformat:jackson-dataformat-xml;external_dependency} -->
</includes>
</bannedDependencies>
</rules>
</configuration>
</plugin>
</plugins>
</build>
</project>
@@ -20,12 +20,12 @@

/**
* A class that represents a push based AvroParser that can parse avro data from a reactive stream.
*
* <p>
* The parser stores the {@link AvroParserState current state}, the sync marker (parsed from the header),
* the file type (parsed from the header metadata), and the list of records collected so far.
*
* <p>
* The {@link AvroParser#parse(ByteBuffer)} method accepts ByteBuffers as they are emitted from the stream.
*
* <p>
* Header Block Block Block ....
*/
public class AvroParser {
@@ -47,7 +47,7 @@ public class AvroParser {
/* Holds objects collected so far. */
private List<AvroObject> objects;

private boolean partialRead; /* Whether the Avro Parser will read the Header and Block off different
private final boolean partialRead; /* Whether the Avro Parser will read the Header and Block off different
streams. This is custom functionality for Changefeed. */

/**
@@ -59,21 +59,18 @@ public class AvroParser {
this.partialRead = partialRead;

/* Start off by adding the header schema to the stack so we can parse it. */
AvroHeaderSchema headerSchema = new AvroHeaderSchema(
this.state,
this::onFilteredHeader
);
AvroHeaderSchema headerSchema = new AvroHeaderSchema(this.state, this::onFilteredHeader);
headerSchema.pushToStack();
}

Mono<Void> prepareParserToReadBody(long sourceOffset, long thresholdIndex) {
if (!this.partialRead) {
return Mono.error(new IllegalStateException("This method should only be called when parsing header "
+ "and body separately."));
return Mono.error(new IllegalStateException(
"This method should only be called when parsing header " + "and body separately."));
}
if (this.objectType == null || this.syncMarker == null) {
return Mono.error(new IllegalStateException("Expected to read entire header before preparing "
+ "parser to read body."));
return Mono.error(
new IllegalStateException("Expected to read entire header before preparing " + "parser to read body."));
}
this.state = new AvroParserState(sourceOffset);
this.objects = new ArrayList<>();
@@ -111,23 +108,17 @@ private void onFilteredHeader(Object header) {
* Block handler.
*
* @param beginObjectIndex The object index after which to start aggregating events in the block.
* By default, this is 0 to collect all objects in the block.
* By default, this is 0 to collect all objects in the block.
*/
private void onBlock(Object beginObjectIndex) {
/* On reading the block, read another block. */
AvroSchema.checkType("beginObjectIndex", beginObjectIndex, Long.class);

final AvroBlockSchema blockSchema = new AvroBlockSchema(
this.objectType,
(Long) beginObjectIndex,
o -> {
AvroSchema.checkType("object", o, AvroObject.class);
this.objects.add((AvroObject) o);
}, /* Object result handler. */
this.syncMarker,
this.state,
this::onBlock
);
final AvroBlockSchema blockSchema = new AvroBlockSchema(this.objectType, (Long) beginObjectIndex, o -> {
AvroSchema.checkType("object", o, AvroObject.class);
this.objects.add((AvroObject) o);
}, /* Object result handler. */
this.syncMarker, this.state, this::onBlock);
blockSchema.pushToStack();
}

@@ -158,8 +149,8 @@ public Flux<AvroObject> parse(ByteBuffer buffer) {
return Flux.empty();
}
AvroSchema schema = this.state.peekFromStack();
while ((schema instanceof AvroCompositeSchema)
|| ((schema instanceof AvroSimpleSchema) && ((AvroSimpleSchema) schema).canProgress())) {
while ((schema instanceof AvroCompositeSchema) || ((schema instanceof AvroSimpleSchema)
&& ((AvroSimpleSchema) schema).canProgress())) {
if (schema instanceof AvroSimpleSchema) {
((AvroSimpleSchema) schema).progress();
}
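
Reviewer note, not part of this PR: a minimal sketch of the push-based contract the class Javadoc above describes, feeding ByteBuffers through parse(ByteBuffer) in order. The single-boolean constructor is inferred from the partialRead field assignment in this diff and is an assumption, as is the empty buffer used as a stand-in for real Avro file chunks.

```java
import com.azure.storage.internal.avro.implementation.AvroParser;
import reactor.core.publisher.Flux;

import java.nio.ByteBuffer;

public class AvroParserSketch {
    public static void main(String[] args) {
        AvroParser parser = new AvroParser(false); // assumed: false = header and blocks on one stream

        Flux.just(ByteBuffer.wrap(new byte[0]))    // stand-in for buffers emitted by a reactive source
            .concatMap(parser::parse)              // push each buffer through the parser, in order
            .subscribe(obj -> System.out.println("Parsed Avro object: " + obj));
    }
}
```
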
@@ -9,5 +9,10 @@
* An interface that represents an AvroReader.
*/
public interface AvroReader {
/**
* Read a stream of {@link AvroObject}.
*
* @return A stream of {@link AvroObject}.
*/
Flux<AvroObject> read();
}
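
Reviewer note, not part of this PR: a sketch of consuming the newly documented read() contract. Because the interface declares a single abstract method, a method reference can stand in for a real reader; only the read() signature above comes from the diff.

```java
import com.azure.storage.internal.avro.implementation.AvroReader;
import reactor.core.publisher.Flux;

public class AvroReaderSketch {
    public static void main(String[] args) {
        AvroReader emptyReader = Flux::empty;      // trivial reader whose stream emits nothing

        emptyReader.read()                         // the documented contract: a Flux of AvroObject
                   .count()
                   .subscribe(count -> System.out.println("Objects read: " + count));
    }
}
```
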
@@ -9,7 +9,7 @@

/**
* An abstract class that represents a composite Avro schema that can return an Object result.
*
* <p>
* Composite avro schemas depend on other avro schemas to populate the result.
*
* @see AvroSchema
Expand All @@ -18,7 +18,7 @@ public abstract class AvroCompositeSchema extends AvroSchema {
/**
* Constructs a new Schema.
*
* @param state The state of the parser.
* @param state The state of the parser.
* @param onResult The result handler.
*/
public AvroCompositeSchema(AvroParserState state, Consumer<Object> onResult) {
@@ -57,7 +57,7 @@ public abstract class AvroSchema {
/**
* Constructs a new Schema.
*
* @param state The state of the parser.
* @param state The state of the parser.
* @param onResult The result handler.
*/
public AvroSchema(AvroParserState state, Consumer<Object> onResult) {
@@ -72,7 +72,9 @@ public AvroSchema(AvroParserState state, Consumer<Object> onResult) {
public abstract void pushToStack();

/**
* @return Whether or not the schema is done. Also indicates that the result is ready.
* Whether the schema is done. Also indicates that the result is ready.
*
* @return Whether the schema is done. Also indicates that the result is ready.
*/
public boolean isDone() {
return this.done;
@@ -88,8 +90,8 @@ public void publishResult() {
/**
* Gets the schema associated with the type.
*
* @param type The {@link AvroType type} that defines the schema.
* @param state {@link AvroParserState}
* @param type The {@link AvroType type} that defines the schema.
* @param state {@link AvroParserState}
* @param onResult {@link Consumer}
* @return {@link AvroSchema}
* @see AvroType
@@ -112,36 +114,36 @@ public static AvroSchema getSchema(AvroType type, AvroParserState state, Consume
return new AvroBytesSchema(state, onResult);
case STRING:
return new AvroStringSchema(state, onResult);
case RECORD: {
case RECORD:
checkType("type", type, AvroType.AvroRecordType.class);
AvroType.AvroRecordType recordType = (AvroType.AvroRecordType) type;
return new AvroRecordSchema(recordType.getName(), recordType.getFields(), state, onResult);
}
case ENUM: {

case ENUM:
checkType("type", type, AvroType.AvroEnumType.class);
AvroType.AvroEnumType enumType = (AvroType.AvroEnumType) type;
return new AvroEnumSchema(enumType.getSymbols(), state, onResult);
}
case ARRAY: {

case ARRAY:
checkType("type", type, AvroType.AvroArrayType.class);
AvroType.AvroArrayType arrayType = (AvroType.AvroArrayType) type;
return new AvroArraySchema(arrayType.getItemType(), state, onResult);
}
case MAP: {

case MAP:
checkType("type", type, AvroType.AvroMapType.class);
AvroType.AvroMapType mapType = (AvroType.AvroMapType) type;
return new AvroMapSchema(mapType.getValueType(), state, onResult);
}
case UNION: {

case UNION:
checkType("type", type, AvroType.AvroUnionType.class);
AvroType.AvroUnionType unionType = (AvroType.AvroUnionType) type;
return new AvroUnionSchema(unionType.getTypes(), state, onResult);
}
case FIXED: {

case FIXED:
checkType("type", type, AvroType.AvroFixedType.class);
AvroType.AvroFixedType fixedType = (AvroType.AvroFixedType) type;
return new AvroFixedSchema(fixedType.getSize(), state, onResult);
}

default:
throw new RuntimeException("Unsupported type " + type.getType());
}
@@ -150,14 +152,14 @@ public static AvroSchema getSchema(AvroType type, AvroParserState state, Consume
/**
* Checks if the object matches the expected type.
*
* @param name The name of the variable.
* @param obj The object.
* @param name The name of the variable.
* @param obj The object.
* @param expectedType The expected type.
*/
public static void checkType(String name, Object obj, Class<?> expectedType) {
if (!expectedType.isAssignableFrom(obj.getClass())) {
throw new IllegalStateException(String.format(
"Expected '%s' to be of type %s", name, expectedType.getSimpleName()));
throw new IllegalStateException(
String.format("Expected '%s' to be of type %s", name, expectedType.getSimpleName()));
}
}

@@ -168,13 +170,10 @@ public static void checkType(String name, Object obj, Class<?> expectedType) {
* @return The byte array.
*/
public static byte[] getBytes(List<?> bytes) {
long longTotalBytes = bytes
.stream()
.mapToLong(buffer -> {
checkType("buffer", buffer, ByteBuffer.class);
return ((ByteBuffer) buffer).remaining();
})
.sum();
long longTotalBytes = bytes.stream().mapToLong(buffer -> {
checkType("buffer", buffer, ByteBuffer.class);
return ((ByteBuffer) buffer).remaining();
}).sum();

if (longTotalBytes > Integer.MAX_VALUE) {
throw new IllegalArgumentException("Bytes can not fit into a single array.");
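
Reviewer note, not part of this PR: a small sketch of the two static helpers shown in the hunks above. The package path comes from the checkstyle suppression file earlier in this PR; the byte values are made up for illustration.

```java
import com.azure.storage.internal.avro.implementation.schema.AvroSchema;

import java.nio.ByteBuffer;
import java.util.Arrays;
import java.util.List;

public class AvroSchemaHelperSketch {
    public static void main(String[] args) {
        // checkType passes silently when the runtime type matches, otherwise throws IllegalStateException.
        AvroSchema.checkType("value", "some text", CharSequence.class);

        // getBytes concatenates a list of ByteBuffers into a single byte[] (total must fit in an int).
        List<ByteBuffer> buffers = Arrays.asList(
            ByteBuffer.wrap(new byte[] { 0x4F, 0x62 }),
            ByteBuffer.wrap(new byte[] { 0x6A, 0x01 }));
        byte[] joined = AvroSchema.getBytes(buffers);
        System.out.println("Joined length: " + joined.length); // prints 4
    }
}
```
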
@@ -9,7 +9,7 @@

/**
* An abstract class that represents a simple Avro schema that can return an Object result.
*
* <p>
* Simple avro schemas directly consume bytes from the state to populate the result.
*
* @see AvroSchema
@@ -18,15 +18,17 @@ public abstract class AvroSimpleSchema extends AvroSchema {
/**
* Constructs a new Schema.
*
* @param state The state of the parser.
* @param state The state of the parser.
* @param onResult The result handler.
*/
public AvroSimpleSchema(AvroParserState state, Consumer<Object> onResult) {
super(state, onResult);
}

/**
* @return Whether or not progress can be made for this schema.
* Whether progress can be made for this schema.
*
* @return Whether progress can be made for this schema.
*/
public abstract boolean canProgress();
