343 issue refactor schema registry sampler (#373)
* [WIP] Apicurio integration

* 302 Fix some unit tests. In progress...

* 302 Add wiremock server stubs

* feat: add files in schemaRegistry adapter impl. Pending SchemaRegistryFactory

* 343 Fix integration issues

* add ParsedSchemaAdapter

* feat: refactor parseSchemaParser

* feat: change creatorFactory

* feat: error casting protobufSchema

* 343 Fix casting error to ProtobufSchema

* feat: JSON type handling OK

* changed version in poms

* changed version in poms, resolve possible null in getParsedSchema

* Update src/main/java/com/sngular/kloadgen/sampler/schemaregistry/adapter/impl/ConfluentParsedSchemaMetadata.java

Co-authored-by: sonatype-lift[bot] <37194012+sonatype-lift[bot]@users.noreply.github.com>

* changed return type of getType in ConfluentParsedSchemaMetadata

* removed the Reference variable in ConfluentParsedSchemaMetadata

* resolve NullPointerException in getSchema and "UnnecessarilyFullyQualified"

* resolve NullPointerException in getSchema

* add final to variables and remove unused imports

* delete duplicated ApicurioParsedSchema, add license header in JMeterHelper, and change Avro serializer buffer size

* delete avroAdapter

---------

Co-authored-by: mseijasm <miguel.seijas@sngular.com>
Co-authored-by: Raúl Abad <raul.abad@sngular.com>
Co-authored-by: sonatype-lift[bot] <37194012+sonatype-lift[bot]@users.noreply.github.com>
4 people authored Apr 21, 2023
1 parent dd931dd commit d6e397b
Showing 19 changed files with 490 additions and 285 deletions.
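
The thrust of the refactor is a registry-agnostic read path: both the Confluent and the Apicurio clients now return a BaseParsedSchema wrapping a ParsedSchemaAdapter, and callers ask the adapter for the schema type and raw schema instead of casting to registry-specific classes. A minimal sketch of the resulting call pattern, assuming a hypothetical subject name and using only signatures visible in the diffs below:

    // Sketch only: "sample-subject" is a placeholder; JMeterHelper.getParsedSchema
    // now returns the common BaseParsedSchema wrapper for either registry.
    final Properties properties = JMeterContextService.getContext().getProperties();
    final ParsedSchemaAdapter adapter =
        JMeterHelper.getParsedSchema("sample-subject", properties).getParsedSchemaAdapter();
    final String schemaType = adapter.getType();     // e.g. "AVRO", "JSON", "PROTOBUF"
    final Object rawSchema = adapter.getRawSchema(); // registry-specific raw schema object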
508 changes: 298 additions & 210 deletions Example-Test-Plan.jmx

Large diffs are not rendered by default.

2 changes: 1 addition & 1 deletion pom-maven-central.xml
@@ -7,7 +7,7 @@
   <artifactId>kloadgen</artifactId>
 
-  <version>5.5.1</version>
+  <version>5.6.0</version>
 
   <name>KLoadGen</name>
   <description>Load Generation Jmeter plugin for Kafka Cluster. Supporting AVRO, JSON Schema and Protobuf schema types. Generate Artificial
2 changes: 1 addition & 1 deletion pom.xml
@@ -7,7 +7,7 @@
   <artifactId>kloadgen</artifactId>
 
-  <version>5.5.1</version>
+  <version>5.6.0</version>
 
   <name>KLoadGen</name>
   <description>Load Generation Jmeter plugin for Kafka Cluster. Supporting AVRO, JSON Schema and Protobuf schema types. Generate Artificial
SchemaExtractorImpl.java
@@ -26,9 +26,11 @@
 import com.sngular.kloadgen.extractor.extractors.JsonExtractor;
 import com.sngular.kloadgen.extractor.extractors.ProtoBufExtractor;
 import com.sngular.kloadgen.model.FieldValueMapping;
-import com.sngular.kloadgen.sampler.schemaregistry.schema.ApicurioParsedSchema;
+import com.sngular.kloadgen.sampler.schemaregistry.adapter.impl.ApicurioParsedSchemaMetadata;
+import com.sngular.kloadgen.sampler.schemaregistry.adapter.impl.ParsedSchemaAdapter;
 import com.sngular.kloadgen.util.JMeterHelper;
 import com.sngular.kloadgen.util.SchemaRegistryKeyHelper;
+import com.squareup.wire.schema.internal.parser.ProtoFileElement;
 import com.squareup.wire.schema.internal.parser.TypeElement;
 import io.confluent.kafka.schemaregistry.ParsedSchema;
 import io.confluent.kafka.schemaregistry.avro.AvroSchema;
@@ -62,17 +64,16 @@ public SchemaExtractorImpl(final AvroExtractor avroExtractor, final JsonExtracto
   @Override
   public final Pair<String, List<FieldValueMapping>> flatPropertiesList(final String subjectName) throws IOException, RestClientException {
     String schemaType = null;
-    Properties properties = JMeterContextService.getContext().getProperties();
+    final Properties properties = JMeterContextService.getContext().getProperties();
 
     final var schemaParsed = JMeterHelper.getParsedSchema(subjectName, properties);
     final List<FieldValueMapping> attributeList = new ArrayList<>();
     final HashMap<String, TypeElement> nestedTypes = new HashMap<>();
-    String registryName = properties.getProperty(SchemaRegistryKeyHelper.SCHEMA_REGISTRY_NAME);
+    final String registryName = properties.getProperty(SchemaRegistryKeyHelper.SCHEMA_REGISTRY_NAME);
+    final ParsedSchemaAdapter parsedSchemaAdapter = schemaParsed.getParsedSchemaAdapter();
+    schemaType = parsedSchemaAdapter.getType();
     if (Objects.nonNull(registryName) && registryName.equalsIgnoreCase(SchemaRegistryEnum.APICURIO.toString())) {
-      ApicurioParsedSchema apicurioParsedSchema = (ApicurioParsedSchema) schemaParsed;
-      Object schema = apicurioParsedSchema.getSchema();
-
-      schemaType = apicurioParsedSchema.getType();
+      final Object schema = ((ApicurioParsedSchemaMetadata) parsedSchemaAdapter).getSchema();
       if (SchemaTypeEnum.AVRO.name().equalsIgnoreCase(schemaType)) {
         ((Schema) schema).getFields().forEach(field -> avroExtractor.processField(field, attributeList, true, false));
       } else if (SchemaTypeEnum.JSON.name().equalsIgnoreCase(schemaType)) {
@@ -81,19 +82,17 @@ public final Pair<String, List<FieldValueMapping>> flatPropertiesList(final Stri
         final var protoFileElement = ((io.apicurio.registry.utils.protobuf.schema.ProtobufSchema) schema).getProtoFileElement();
         protoFileElement.getTypes().forEach(field -> protoBufExtractor.processField(field, attributeList, protoFileElement.getImports(), false, nestedTypes));
       } else {
-        throw new KLoadGenException(String.format("Schema type not supported %s", apicurioParsedSchema.getType()));
+        throw new KLoadGenException(String.format("Schema type not supported %s", schemaType));
       }
 
     } else if (Objects.nonNull(registryName) && registryName.equalsIgnoreCase(SchemaRegistryEnum.CONFLUENT.toString())) {
-      ParsedSchema confluentParsedSchema = (ParsedSchema) schemaParsed;
-
-      schemaType = confluentParsedSchema.schemaType();
       if (SchemaTypeEnum.AVRO.name().equalsIgnoreCase(schemaType)) {
-        (((AvroSchema) confluentParsedSchema).rawSchema()).getFields().forEach(field -> avroExtractor.processField(field, attributeList, true, false));
+        ((Schema) parsedSchemaAdapter.getRawSchema()).getFields().forEach(field -> avroExtractor.processField(field, attributeList, true, false));
       } else if (SchemaTypeEnum.JSON.name().equalsIgnoreCase(schemaType)) {
-        attributeList.addAll(jsonExtractor.processSchema(((JsonSchema) confluentParsedSchema).toJsonNode()));
+        final JsonSchema jsonSchema = new JsonSchema(parsedSchemaAdapter.getRawSchema().toString());
+        attributeList.addAll(jsonExtractor.processSchema(jsonSchema.toJsonNode()));
       } else if (SchemaTypeEnum.PROTOBUF.name().equalsIgnoreCase(schemaType)) {
-        final var protoFileElement = ((ProtobufSchema) confluentParsedSchema).rawSchema();
+        final var protoFileElement = (ProtoFileElement) parsedSchemaAdapter.getRawSchema();
         protoFileElement.getTypes().forEach(field -> protoBufExtractor.processField(field, attributeList, protoFileElement.getImports(), false, nestedTypes));
       } else {
         throw new KLoadGenException(String.format("Schema type not supported %s", schemaType));
AvroObjectCreatorFactory.java
@@ -18,7 +18,10 @@
 import com.sngular.kloadgen.processor.objectcreatorfactory.ObjectCreatorFactory;
 import com.sngular.kloadgen.processor.util.SchemaProcessorUtils;
 import com.sngular.kloadgen.randomtool.generator.AvroGeneratorTool;
+import com.sngular.kloadgen.sampler.schemaregistry.adapter.impl.ApicurioParsedSchemaMetadata;
+import com.sngular.kloadgen.sampler.schemaregistry.adapter.impl.BaseParsedSchema;
 import com.sngular.kloadgen.sampler.schemaregistry.adapter.impl.BaseSchemaMetadata;
+import com.sngular.kloadgen.sampler.schemaregistry.adapter.impl.ParsedSchemaAdapter;
 import com.sngular.kloadgen.sampler.schemaregistry.adapter.impl.SchemaMetadataAdapter;
 import com.sngular.kloadgen.serializer.EnrichedRecord;
 import io.confluent.kafka.schemaregistry.ParsedSchema;
@@ -45,6 +48,14 @@ public AvroObjectCreatorFactory(final Object schema, final BaseSchemaMetadata<?
       this.schema = (Schema) ((ParsedSchema) schema).rawSchema();
     } else if (schema instanceof Schema) {
       this.schema = (Schema) schema;
+    } else if (schema instanceof BaseParsedSchema) {
+      final BaseParsedSchema schemaParse = (BaseParsedSchema) schema;
+      final ParsedSchemaAdapter adapterParse = schemaParse.getParsedSchemaAdapter();
+      if (adapterParse instanceof ApicurioParsedSchemaMetadata) {
+        this.schema = (Schema) ((ApicurioParsedSchemaMetadata) adapterParse).getSchema();
+      } else {
+        this.schema = adapterParse.getRawSchema();
+      }
     } else {
       throw new KLoadGenException("Unsupported schema type");
     }
ProtobufObjectCreatorFactory.java
@@ -24,7 +24,9 @@
 import com.sngular.kloadgen.processor.objectcreatorfactory.ObjectCreatorFactory;
 import com.sngular.kloadgen.processor.util.SchemaProcessorUtils;
 import com.sngular.kloadgen.randomtool.generator.ProtoBufGeneratorTool;
+import com.sngular.kloadgen.sampler.schemaregistry.adapter.impl.BaseParsedSchema;
 import com.sngular.kloadgen.sampler.schemaregistry.adapter.impl.BaseSchemaMetadata;
+import com.sngular.kloadgen.sampler.schemaregistry.adapter.impl.ParsedSchemaAdapter;
 import com.sngular.kloadgen.sampler.schemaregistry.adapter.impl.SchemaMetadataAdapter;
 import com.sngular.kloadgen.serializer.EnrichedRecord;
 import com.squareup.wire.schema.internal.parser.ProtoFileElement;
@@ -46,6 +48,11 @@ public ProtobufObjectCreatorFactory(final Object schema, final BaseSchemaMetadat
       this.schema = SchemaProcessorUtils.buildProtoDescriptor((ProtoFileElement) ((ParsedSchema) schema).rawSchema(), metadata);
     } else if (schema instanceof ProtoFileElement) {
       this.schema = SchemaProcessorUtils.buildProtoDescriptor((ProtoFileElement) schema, metadata);
+    } else if (schema instanceof BaseParsedSchema) {
+      final BaseParsedSchema schemaParse = (BaseParsedSchema) schema;
+      final ParsedSchemaAdapter adapterParse = schemaParse.getParsedSchemaAdapter();
+      final Object schemaParsed = adapterParse.getRawSchema();
+      this.schema = SchemaProcessorUtils.buildProtoDescriptor((ProtoFileElement) schemaParsed, metadata);
     } else {
       throw new KLoadGenException("Unsupported schema type");
     }
SchemaProcessorUtils.java
@@ -26,6 +26,7 @@
 import com.google.protobuf.Descriptors.DescriptorValidationException;
 import com.sngular.kloadgen.model.FieldValueMapping;
 import com.sngular.kloadgen.sampler.schemaregistry.adapter.impl.BaseSchemaMetadata;
+import com.sngular.kloadgen.sampler.schemaregistry.adapter.impl.ParsedSchemaAdapter;
 import com.sngular.kloadgen.sampler.schemaregistry.adapter.impl.SchemaMetadataAdapter;
 import com.sngular.kloadgen.util.JMeterHelper;
 import com.sngular.kloadgen.util.ProtobufHelper;
@@ -150,6 +151,7 @@ public static String[] splitAndNormalizeFullFieldName(final String fullFieldName
     return Arrays.stream(fields).map(field -> field.replaceAll("\\[.*]", "")).toArray(String[]::new);
   }
 
+  @SuppressWarnings("checkstyle:SingleSpaceSeparator")
   public static Descriptors.Descriptor buildProtoDescriptor(final ProtoFileElement schema, final BaseSchemaMetadata<? extends SchemaMetadataAdapter> metadata)
       throws Descriptors.DescriptorValidationException, IOException {
 
@@ -166,8 +168,9 @@ public static Descriptors.Descriptor buildProtoDescriptor(final ProtoFileElement
         schemaBuilder.addSchema(importedSchema);
       }
     } else {
-      final var importedProtobufSchema = (ProtobufSchema) JMeterHelper.getParsedSchema(getSubjectName(importedClass, metadata),
-          JMeterContextService.getContext().getProperties());
+      final ParsedSchemaAdapter protoFileElement = JMeterHelper.getParsedSchema(getSubjectName(importedClass, metadata),
+          JMeterContextService.getContext().getProperties()).getParsedSchemaAdapter();
+      final var importedProtobufSchema = new ProtobufSchema(protoFileElement.getRawSchema(), metadata.getSchemaMetadataAdapter().getReferences(), new HashMap<>());
       if (!ProtobufHelper.NOT_ACCEPTED_IMPORTS.contains(importedClass)) {
         schemaBuilder.addDependency(importedProtobufSchema.toDescriptor().getFullName());
         schemaBuilder.addSchema(convertDynamicSchema(importedProtobufSchema));
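
The import-resolution branch above no longer casts the registry response to ProtobufSchema; it rebuilds one from the adapter's raw ProtoFileElement plus the references carried in the schema metadata. A hedged sketch of that construction in isolation — a fragment reusing the imports of the file above, with subjectName and metadata as placeholders:

    // Rebuild a Confluent ProtobufSchema from the adapter's raw schema. The empty
    // HashMap is the dependencies argument: no pre-resolved dependencies supplied.
    final ParsedSchemaAdapter adapter = JMeterHelper
        .getParsedSchema(subjectName, JMeterContextService.getContext().getProperties())
        .getParsedSchemaAdapter();
    final ProtobufSchema imported = new ProtobufSchema(
        (ProtoFileElement) adapter.getRawSchema(),
        metadata.getSchemaMetadataAdapter().getReferences(),
        new HashMap<>());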
3 changes: 2 additions & 1 deletion src/main/java/com/sngular/kloadgen/sampler/SamplerUtil.java
@@ -327,7 +327,8 @@ private static void verifySecurity(final JavaSamplerContext context, final Prope
     props.put(SslConfigs.SSL_ENABLED_PROTOCOLS_CONFIG, context.getParameter(SslConfigs.SSL_ENABLED_PROTOCOLS_CONFIG));
 
     props.put(SslConfigs.SSL_ENDPOINT_IDENTIFICATION_ALGORITHM_CONFIG,
-        propertyOrDefault(context.getParameter(SslConfigs.SSL_ENDPOINT_IDENTIFICATION_ALGORITHM_CONFIG), ProducerKeysHelper.DEFAULT_SSL_ENDPOINT_IDENTIFICATION_ALGORITHM,
+        propertyOrDefault(context.getParameter(SslConfigs.SSL_ENDPOINT_IDENTIFICATION_ALGORITHM_CONFIG),
+            ProducerKeysHelper.DEFAULT_SSL_ENDPOINT_IDENTIFICATION_ALGORITHM,
             ""));
 
     props.put(SslConfigs.SSL_KEYMANAGER_ALGORITHM_CONFIG, context.getParameter(SslConfigs.SSL_KEYMANAGER_ALGORITHM_CONFIG));
SchemaRegistryAdapter.java
@@ -3,6 +3,7 @@
 import java.util.Collection;
 import java.util.Map;
 
+import com.sngular.kloadgen.sampler.schemaregistry.adapter.impl.BaseParsedSchema;
 import com.sngular.kloadgen.sampler.schemaregistry.adapter.impl.BaseSchemaMetadata;
 import com.sngular.kloadgen.sampler.schemaregistry.adapter.impl.SchemaMetadataAdapter;
 
@@ -18,7 +19,7 @@ public interface SchemaRegistryAdapter {
 
   BaseSchemaMetadata getLatestSchemaMetadata(String subjectName);
 
-  Object getSchemaBySubject(String subjectName);
+  BaseParsedSchema getSchemaBySubject(String subjectName);
 
-  Object getSchemaBySubjectAndId(String subjectName, BaseSchemaMetadata<? extends SchemaMetadataAdapter> metadata);
+  BaseParsedSchema getSchemaBySubjectAndId(String subjectName, BaseSchemaMetadata<? extends SchemaMetadataAdapter> metadata);
 }
ApicurioParsedSchemaMetadata.java (formerly ApicurioParsedSchema.java)
@@ -1,4 +1,4 @@
-package com.sngular.kloadgen.sampler.schemaregistry.schema;
+package com.sngular.kloadgen.sampler.schemaregistry.adapter.impl;
 
 import lombok.Getter;
 import lombok.NoArgsConstructor;
@@ -7,11 +7,12 @@
 @Getter
 @Setter
 @NoArgsConstructor
-public class ApicurioParsedSchema {
-
-  private String type;
+public class ApicurioParsedSchemaMetadata extends ParsedSchemaAdapter {
 
   private Object schema;
 
+  private String rawSchema;
+
+  private String type;
 
 }
BaseParsedSchema.java (new file)
@@ -0,0 +1,15 @@
package com.sngular.kloadgen.sampler.schemaregistry.adapter.impl;

import org.apache.avro.Schema.Parser;

public class BaseParsedSchema<T extends ParsedSchemaAdapter> extends Parser {

  private final T parsedSchemaAdapter;

  public BaseParsedSchema(T parsedSchemaAdapter) {
    this.parsedSchemaAdapter = parsedSchemaAdapter;
  }

  public T getParsedSchemaAdapter() {
    return parsedSchemaAdapter;
  }
}
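
BaseParsedSchema itself is only a generic holder: it is constructed around a concrete adapter and unwrapped on the consumer side. A minimal sketch, assuming an ApicurioParsedSchemaMetadata populated the way getSchemaBySubject does further down:

    // setType/getType come from the Lombok @Setter/@Getter annotations on the class above.
    final ApicurioParsedSchemaMetadata apicurioMetadata = new ApicurioParsedSchemaMetadata();
    apicurioMetadata.setType("AVRO");
    final BaseParsedSchema<ApicurioParsedSchemaMetadata> parsed = new BaseParsedSchema<>(apicurioMetadata);
    final String type = parsed.getParsedSchemaAdapter().getType(); // "AVRO"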
ConfluentParsedSchemaMetadata.java (new file)
@@ -0,0 +1,65 @@
package com.sngular.kloadgen.sampler.schemaregistry.adapter.impl;

import java.util.List;

import io.confluent.kafka.schemaregistry.ParsedSchema;
import io.confluent.kafka.schemaregistry.client.rest.entities.SchemaReference;
import io.confluent.kafka.schemaregistry.protobuf.ProtobufSchema;
import lombok.Getter;
import org.apache.avro.Schema;

@Getter
public class ConfluentParsedSchemaMetadata extends ParsedSchemaAdapter {

  private String schemaType;

  private String name;

  private String canonicalString;

  private Object rawSchema;

  private ConfluentParsedSchemaMetadata(ParsedSchema parsedSchema) {
    this.schemaType = parsedSchema.schemaType();
    this.name = parsedSchema.name();
    this.canonicalString = parsedSchema.canonicalString();
    this.rawSchema = parsedSchema.rawSchema();
  }

  private ConfluentParsedSchemaMetadata(Schema schema) {
    this.schemaType = schema.getType().getName();
    this.name = schema.getName();
  }

  public ConfluentParsedSchemaMetadata(ProtobufSchema schema) {
    this.schemaType = schema.schemaType();
    this.name = schema.name();
    this.rawSchema = schema.rawSchema();
    this.canonicalString = schema.canonicalString();
  }

  public ConfluentParsedSchemaMetadata() {
  }

  public static ParsedSchemaAdapter parse(final ParsedSchema parsedSchema) {
    return new ConfluentParsedSchemaMetadata(parsedSchema);
  }

  public static ParsedSchemaAdapter parse(final Schema schema) {
    return new ConfluentParsedSchemaMetadata(schema);
  }

  public static ParsedSchemaAdapter parse(final ProtobufSchema schema) {
    return new ConfluentParsedSchemaMetadata(schema);
  }

  @Override
  public String getType() {
    return this.schemaType;
  }

  @Override
  public Object getRawSchema() {
    return this.rawSchema;
  }
}
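
The static parse overloads are the entry point on the Confluent side. A small usage sketch, assuming a trivial Avro record schema; AvroSchema is io.confluent.kafka.schemaregistry.avro.AvroSchema, already imported elsewhere in this changeset:

    // Adapt a Confluent ParsedSchema; the adapter then answers type and raw-schema queries.
    final ParsedSchema avroSchema = new AvroSchema("{\"type\":\"record\",\"name\":\"Sample\",\"fields\":[]}");
    final ParsedSchemaAdapter adapter = ConfluentParsedSchemaMetadata.parse(avroSchema);
    final String type = adapter.getType();     // "AVRO"
    final Object raw = adapter.getRawSchema(); // org.apache.avro.Schema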
ParsedSchemaAdapter.java (new file)
@@ -0,0 +1,7 @@
package com.sngular.kloadgen.sampler.schemaregistry.adapter.impl;

public abstract class ParsedSchemaAdapter {

  public abstract <T> T getType();

  public abstract <T> T getRawSchema();
}
ApicurioSchemaRegistry.java
@@ -15,10 +15,12 @@
 import com.sngular.kloadgen.exception.KLoadGenException;
 import com.sngular.kloadgen.sampler.schemaregistry.SchemaRegistryAdapter;
 import com.sngular.kloadgen.sampler.schemaregistry.SchemaRegistryConstants;
+import com.sngular.kloadgen.sampler.schemaregistry.adapter.impl.ApicurioParsedSchemaMetadata;
 import com.sngular.kloadgen.sampler.schemaregistry.adapter.impl.ApicurioSchemaMetadata;
+import com.sngular.kloadgen.sampler.schemaregistry.adapter.impl.BaseParsedSchema;
 import com.sngular.kloadgen.sampler.schemaregistry.adapter.impl.BaseSchemaMetadata;
+import com.sngular.kloadgen.sampler.schemaregistry.adapter.impl.ParsedSchemaAdapter;
 import com.sngular.kloadgen.sampler.schemaregistry.adapter.impl.SchemaMetadataAdapter;
-import com.sngular.kloadgen.sampler.schemaregistry.schema.ApicurioParsedSchema;
 import com.sngular.kloadgen.util.SchemaRegistryKeyHelper;
 import io.apicurio.registry.resolver.SchemaParser;
 import io.apicurio.registry.rest.client.RegistryClient;
@@ -39,6 +41,8 @@ public class ApicurioSchemaRegistry implements SchemaRegistryAdapter {
 
   private Map<String, String> propertiesMap;
 
+  private ApicurioParsedSchemaMetadata apicurioParsedSchemaMetadata;
+
   public ApicurioSchemaRegistry() {
     this.propertiesMap = Map.of(SchemaRegistryKeyHelper.SCHEMA_REGISTRY_NAME, SchemaRegistryConstants.SCHEMA_REGISTRY_APICURIO,
         SchemaRegistryKeyHelper.SCHEMA_REGISTRY_URL_KEY, SerdeConfig.REGISTRY_URL);
@@ -88,8 +92,8 @@ public BaseSchemaMetadata<ApicurioSchemaMetadata> getLatestSchemaMetadata(String
   }
 
   @Override
-  public ApicurioParsedSchema getSchemaBySubject(String subjectName) {
-    final ApicurioParsedSchema schema = new ApicurioParsedSchema();
+  public BaseParsedSchema<ApicurioParsedSchemaMetadata> getSchemaBySubject(String subjectName) {
+    final ApicurioParsedSchemaMetadata schema = new ApicurioParsedSchemaMetadata();
     try {
       List<SearchedArtifact> artifacts = this.schemaRegistryClient.searchArtifacts(null, subjectName, null,
           null, null, null, null, null, null).getArtifacts();
@@ -114,15 +118,17 @@ public ApicurioParsedSchema getSchemaBySubject(String subjectName) {
          throw new KLoadGenException(String.format("Schema type not supported %s", searchedArtifactType));
        }
        schema.setType(searchedArtifactType);
-       return schema;
+       ParsedSchemaAdapter parsedSchemaAdapter = schema;
+       return new BaseParsedSchema(parsedSchemaAdapter);
      }
    } catch (IOException e) {
      throw new KLoadGenException(e);
    }
  }
 
-  public Object getSchemaBySubjectAndId(String subjectName, BaseSchemaMetadata<? extends SchemaMetadataAdapter> metadata) {
-    Object schema = null;
+  public BaseParsedSchema<ApicurioParsedSchemaMetadata> getSchemaBySubjectAndId(String subjectName, BaseSchemaMetadata<? extends SchemaMetadataAdapter> metadata) {
+    final ApicurioParsedSchemaMetadata schema = new ApicurioParsedSchemaMetadata();
+
    SchemaMetadataAdapter schemaMetadataAdapter = metadata.getSchemaMetadataAdapter();
    try {
      InputStream inputStream = this.schemaRegistryClient.getContentByGlobalId(schemaMetadataAdapter.getGlobalId());
@@ -131,20 +137,21 @@ public Object getSchemaBySubjectAndId(String subjectName, BaseSchemaMetadata<? e
      String searchedArtifactType = schemaMetadataAdapter.getType();
      if (SchemaTypeEnum.AVRO.name().equalsIgnoreCase(searchedArtifactType)) {
        SchemaParser parserAvro = new AvroSchemaParser(null);
-       schema = parserAvro.parseSchema(result.getBytes(StandardCharsets.UTF_8), new HashMap<>());
+       schema.setSchema(parserAvro.parseSchema(result.getBytes(StandardCharsets.UTF_8), new HashMap<>()));
      } else if (SchemaTypeEnum.JSON.name().equalsIgnoreCase(searchedArtifactType)) {
        SchemaParser parserJson = new JsonSchemaParser();
-       schema = parserJson.parseSchema(result.getBytes(StandardCharsets.UTF_8), new HashMap<>());
+       schema.setSchema(parserJson.parseSchema(result.getBytes(StandardCharsets.UTF_8), new HashMap<>()));
      } else if (SchemaTypeEnum.PROTOBUF.name().equalsIgnoreCase(searchedArtifactType)) {
        SchemaParser parserProtobuf = new ProtobufSchemaParser();
-       schema = parserProtobuf.parseSchema(result.getBytes(StandardCharsets.UTF_8), new HashMap<>());
+       schema.setSchema(parserProtobuf.parseSchema(result.getBytes(StandardCharsets.UTF_8), new HashMap<>()));
      } else {
        throw new KLoadGenException(String.format("Schema type not supported %s", searchedArtifactType));
      }
    } catch (IOException e) {
      throw new RuntimeException(e);
    }
-   return schema;
+   ParsedSchemaAdapter parsedSchemaAdapter = schema;
+   return new BaseParsedSchema(parsedSchemaAdapter);
  }
