Mirror of https://github.com/HangarMC/Hangar.git, synced 2024-11-21 01:21:54 +08:00
chore: delete graphql shit, remove usage of preview features
This commit is contained in:
parent
4de8dac6d3
commit
ad6effd074
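The preview-feature part of this change swaps Java 21 preview syntax (string templates and unnamed lambda parameters) for standard equivalents. A minimal sketch of that kind of rewrite, with hypothetical names rather than code from this commit:

import java.util.concurrent.CompletableFuture;

// Illustrative sketch only; class and method names are hypothetical, not part of the diff below.
class PreviewFeatureRewriteSketch {

    // Preview string template: STR."FROM \{rootTable} \{alias}"
    // Standard replacement: plain concatenation.
    static String buildFrom(String rootTable, String alias) {
        return "FROM " + rootTable + " " + alias;
    }

    // Preview unnamed variables: future.whenComplete((_, _) -> finish.run())
    // Standard replacement: named lambda parameters, as in the SentryCacheableAspect hunk below.
    static void finishWhenDone(CompletableFuture<?> future, Runnable finish) {
        future.whenComplete((result, ex) -> finish.run());
    }

    public static void main(String[] args) {
        System.out.println(buildFrom("projects", "p"));
        finishWhenDone(CompletableFuture.completedFuture("ok"), () -> System.out.println("finished"));
    }
}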
@@ -8,7 +8,7 @@
</list>
</option>
</component>
<component name="ProjectRootManager" version="2" languageLevel="JDK_21_PREVIEW" project-jdk-name="corretto-21" project-jdk-type="JavaSDK">
<component name="ProjectRootManager" version="2" languageLevel="JDK_21" project-jdk-name="corretto-21" project-jdk-type="JavaSDK">
<output url="file://$PROJECT_DIR$/target" />
</component>
</project>
</project>
@@ -148,10 +148,6 @@
<groupId>org.springframework.boot</groupId>
<artifactId>spring-boot-starter-webflux</artifactId>
</dependency>
<dependency>
<groupId>org.springframework.boot</groupId>
<artifactId>spring-boot-starter-graphql</artifactId>
</dependency>

<dependency>
<groupId>org.apache.commons</groupId>
@@ -429,16 +425,12 @@
<testSource>${java.version}</testSource>
<testTarget>${java.version}</testTarget>
<encoding>${encoding}</encoding>
<compilerArgs>--enable-preview</compilerArgs>
</configuration>
</plugin>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-surefire-plugin</artifactId>
<version>3.3.1</version>
<configuration>
<argLine>--enable-preview</argLine>
</configuration>
</plugin>
<plugin>
<groupId>org.springframework.boot</groupId>
@@ -36,7 +36,7 @@ public class SentryCacheableAspect {
if (CompletionStage.class.isAssignableFrom(method.getReturnType())) {
    try {
        return ((CompletionStage<?>) pjp.proceed())
            .whenComplete((_, _) -> childSpan.finish());
            .whenComplete((result, ex) -> childSpan.finish());
    } finally {
        childSpan.finish();
    }
@@ -1,30 +0,0 @@
package io.papermc.hangar.components.query;

import graphql.execution.ExecutionStepInfo;
import graphql.schema.GraphQLList;

public final class PrefixUtil {

    private PrefixUtil() {
    }

    public static String getParentAlias(final ExecutionStepInfo info, final QueryBuilder queryBuilder) {
        return getParentTable(info, queryBuilder, true);
    }

    public static String getParentTable(final ExecutionStepInfo info, final QueryBuilder queryBuilder) {
        return getParentTable(info, queryBuilder, false);
    }

    private static String getParentTable(final ExecutionStepInfo info, final QueryBuilder queryBuilder, final boolean alias) {
        final ExecutionStepInfo parent = info.getParent();
        if (parent == null || parent.getObjectType() == null || parent.getObjectType().getName().equals("Query")) {
            return queryBuilder.rootTable + (alias ? "_" : ".");
        } else if (parent.getType() instanceof GraphQLList) {
            // skip lists, else we would match them twice
            return getParentTable(parent, queryBuilder, true);
        } else {
            return getParentTable(parent, queryBuilder, true) + parent.getField().getName() + (alias ? "_" : ".");
        }
    }
}
@@ -1,92 +0,0 @@
package io.papermc.hangar.components.query;

import graphql.GraphQLContext;
import java.util.Comparator;
import java.util.HashMap;
import java.util.HashSet;
import java.util.LinkedHashSet;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.function.Function;
import org.jdbi.v3.core.Handle;
import org.jdbi.v3.core.statement.Query;

public class QueryBuilder {

    public static final String QUERY_BUILDER = "queryBuilder";

    String rootTable = "";
    String from = "";
    String condition = "";
    Set<String> fields = new HashSet<>();
    Set<String> joins = new LinkedHashSet<>();
    Map<String, Function<Map<String, String>, String>> resolver = new HashMap<>();
    Map<String, Object> variables = new HashMap<>();

    public static List<QueryBuilder> getAllQueryBuilders(final GraphQLContext context) {
        return context.getOrDefault(QUERY_BUILDER, List.of());
    }

    public static QueryBuilder getActiveQueryBuilder(final GraphQLContext context) {
        return context.<List<QueryBuilder>>get(QUERY_BUILDER).getLast();
    }

    public static QueryBuilder newQueryBuilder(final GraphQLContext context) {
        if (!context.hasKey(QUERY_BUILDER)) {
            context.put(QUERY_BUILDER, new LinkedList<>());
        }
        final QueryBuilder newBuilder = new QueryBuilder();
        context.<List<QueryBuilder>>get(QUERY_BUILDER).add(newBuilder);
        return newBuilder;
    }

    public String buildSql() {
        return STR."""
            SELECT

            \{this.fields.stream().sorted(Comparator.comparing(String::length)).reduce((a, b) -> a + ",\n" + b).orElse("")}

            \{this.from}

            \{this.joins.stream().reduce((a, b) -> a + "\n" + b).orElse("")}

            \{this.condition};
            """;
    }

    public List<Map<String, String>> execute(final Handle handle, final String sql) {
        Query select = handle.select(sql);
        // bind the arguments
        for (final var entry : this.variables.entrySet()) {
            select = select.bind(entry.getKey(), entry.getValue());
        }

        // execute the query
        return select.mapToMap(String.class).collectIntoList();
    }

    public void handleResolvers(final List<Map<String, String>> result) {
        Set<String> keysToRemove = null;
        for (final Map<String, String> inputMap : result) {
            // run the resolvers
            for (final var entry : this.resolver.entrySet()) {
                inputMap.put(entry.getKey(), entry.getValue().apply(inputMap));
            }
            // first time: find the ext keys
            if (keysToRemove == null) {
                keysToRemove = new HashSet<>();
                for (final String key : inputMap.keySet()) {
                    if (key.startsWith("ext_")) {
                        keysToRemove.add(key);
                    }
                }
            }
            // remove the ext keys
            for (final String key : keysToRemove) {
                inputMap.remove(key);
            }
        }
    }
}
@@ -1,186 +0,0 @@
package io.papermc.hangar.components.query;

import com.fasterxml.jackson.core.JsonGenerator;
import com.fasterxml.jackson.databind.SerializerProvider;
import com.fasterxml.jackson.databind.module.SimpleModule;
import com.fasterxml.jackson.databind.ser.std.StdSerializer;
import graphql.ExecutionInput;
import graphql.ExecutionResult;
import graphql.GraphQLError;
import graphql.execution.instrumentation.Instrumentation;
import graphql.execution.instrumentation.InstrumentationState;
import graphql.execution.instrumentation.parameters.InstrumentationExecutionParameters;
import graphql.execution.instrumentation.parameters.InstrumentationFieldFetchParameters;
import graphql.schema.DataFetcher;
import graphql.schema.GraphQLScalarType;
import graphql.schema.GraphQLSchema;
import graphql.schema.PropertyDataFetcher;
import java.io.IOException;
import java.sql.SQLException;
import java.time.Duration;
import java.time.LocalDateTime;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
import java.util.concurrent.CompletableFuture;
import org.jdbi.v3.core.Jdbi;
import org.jetbrains.annotations.NotNull;
import org.postgresql.jdbc.PgArray;
import org.springframework.boot.autoconfigure.condition.ConditionalOnProperty;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;

import static io.papermc.hangar.components.query.QueryBuilder.getActiveQueryBuilder;
import static io.papermc.hangar.components.query.QueryBuilder.getAllQueryBuilders;
import static io.papermc.hangar.components.query.QueryHelper.EMPTY;

@Configuration
@ConditionalOnProperty("enable-graphql")
public class QueryConfig {

    private final Jdbi jdbi;

    public QueryConfig(final Jdbi jdbi) {
        this.jdbi = jdbi;
    }

    @Bean
    public Instrumentation instrumentation() {
        return new Instrumentation() {

            @Override
            public @NotNull GraphQLSchema instrumentSchema(final GraphQLSchema schema, final InstrumentationExecutionParameters parameters, final InstrumentationState state) {
                return Instrumentation.super.instrumentSchema(schema, parameters, state);
            }

            @Override
            public @NotNull ExecutionInput instrumentExecutionInput(final ExecutionInput executionInput, final InstrumentationExecutionParameters parameters, final InstrumentationState state) {
                System.out.println("start!");
                parameters.getGraphQLContext().put("startTime", LocalDateTime.now());
                return Instrumentation.super.instrumentExecutionInput(executionInput, parameters, state);
            }

            @Override
            public @NotNull DataFetcher<?> instrumentDataFetcher(final DataFetcher<?> dataFetcher, final InstrumentationFieldFetchParameters parameters, final InstrumentationState state) {
                System.out.println(STR."fetch \{parameters.getEnvironment().getField().getName()} using \{dataFetcher.getClass().getName()} \{parameters.getEnvironment().getExecutionStepInfo().getPath()}");
                // replace the default property data fetcher with our own
                if (dataFetcher instanceof final PropertyDataFetcher<?> propertyDataFetcher) {
                    final QueryBuilder queryBuilder = getActiveQueryBuilder(parameters.getEnvironment().getGraphQlContext());
                    final String parentAlias = PrefixUtil.getParentAlias(parameters.getEnvironment().getExecutionStepInfo(), queryBuilder);
                    final String parentTable = PrefixUtil.getParentTable(parameters.getEnvironment().getExecutionStepInfo(), queryBuilder);
                    queryBuilder.fields.add(STR."\{parentTable}\{propertyDataFetcher.getPropertyName()} AS \{parentAlias}\{parameters.getExecutionStepInfo().getPath().getSegmentName()}");

                    // find return type
                    if (parameters.getField().getType() instanceof final GraphQLScalarType scalarType) {
                        if (scalarType.getName().equals("Int")) {
                            return (DataFetcher<Integer>) dum -> 0;
                        }
                    }
                    return (DataFetcher<Object>) dum -> EMPTY;
                }
                return Instrumentation.super.instrumentDataFetcher(dataFetcher, parameters, state);
            }

            @Override
            public @NotNull CompletableFuture<ExecutionResult> instrumentExecutionResult(final ExecutionResult executionResult, final InstrumentationExecutionParameters parameters, final InstrumentationState state) {
                final List<QueryBuilder> queryBuilders = getAllQueryBuilders(parameters.getGraphQLContext());
                final QueryMerger merger = new QueryMerger(parameters.getSchema());

                // (parsing) error? -> return
                if (!executionResult.getErrors().isEmpty()) {
                    return CompletableFuture.completedFuture(executionResult);
                }

                // introspection query? -> return
                if (parameters.getOperation() != null && parameters.getOperation().equals("IntrospectionQuery")) {
                    return CompletableFuture.completedFuture(executionResult);
                }

                final Map<String, Object> totalResult = new HashMap<>();
                final Map<Object, Object> totalExt = LinkedHashMap.newLinkedHashMap(queryBuilders.size());
                final List<GraphQLError> errors = new ArrayList<>();
                // TODO we can run these concurrently
                for (final QueryBuilder queryBuilder : queryBuilders) {
                    final String sql = queryBuilder.buildSql();
                    final LocalDateTime parseTime = LocalDateTime.now();

                    try {
                        QueryConfig.this.jdbi.useHandle((handle -> {
                            // run the query
                            final var resultList = queryBuilder.execute(handle, sql);

                            final LocalDateTime executionTime = LocalDateTime.now();

                            // handle resolvers
                            queryBuilder.handleResolvers(resultList);

                            // merge the result
                            final var result = merger.merge(resultList);

                            // collect some data
                            final LocalDateTime startTime = parameters.getGraphQLContext().get("startTime");
                            final LocalDateTime endTime = LocalDateTime.now();

                            final var ext = LinkedHashMap.newLinkedHashMap(5);
                            ext.put("sql", sql.split("\n"));
                            ext.put("sql2", sql.replace("\n", " "));
                            ext.put("parseTime", Duration.between(startTime, parseTime).toMillis() + "ms");
                            ext.put("executionTime", Duration.between(parseTime, executionTime).toMillis() + "ms");
                            ext.put("resolveTime", Duration.between(executionTime, endTime).toMillis() + "ms");
                            ext.put("totalTime", Duration.between(startTime, endTime).toMillis() + "ms");

                            // store the result
                            totalResult.putAll(result);
                            totalExt.put(queryBuilder.rootTable, ext);
                        }));
                    } catch (Exception ex) {
                        final var error = LinkedHashMap.<String, Object>newLinkedHashMap(3);
                        error.put("message", ex.getMessage() != null ? ex.getMessage().split("\n") : "<null>");
                        error.put("sql", sql.split("\n"));
                        error.put("sql2", sql.replace("\n", " "));
                        errors.add(GraphQLError.newError().message("Dum").extensions(error).build());
                    }
                }

                return CompletableFuture.completedFuture(ExecutionResult.newExecutionResult()
                    .data(totalResult)
                    .extensions(totalExt)
                    .errors(errors)
                    .build());
            }
        };
    }

    @Bean // TODO remove again eventually
    public SimpleModule queryPostgresSerializer() {
        final SimpleModule module = new SimpleModule();
        module.addSerializer(new StdSerializer<>(PgArray.class) {

            @Override
            public void serialize(final PgArray value, final JsonGenerator gen, final SerializerProvider provider) throws IOException {
                gen.writeStartArray();
                final Object array;
                try {
                    array = value.getArray();
                } catch (final SQLException e) {
                    throw new RuntimeException(e);
                }
                if (array instanceof final Object[] arr) {
                    for (final Object o : arr) {
                        gen.writeObject(o);
                    }
                } else if (array instanceof final Iterable<?> it) {
                    for (final Object o : it) {
                        gen.writeObject(o);
                    }
                } else {
                    throw new RuntimeException("Unknown array type: " + array.getClass());
                }
                gen.writeEndArray();
            }
        });
        return module;
    }
}
@@ -1,74 +0,0 @@
package io.papermc.hangar.components.query;

import graphql.schema.DataFetchingEnvironment;
import io.papermc.hangar.components.images.service.AvatarService;
import io.papermc.hangar.service.internal.file.FileService;
import java.util.List;

import static io.papermc.hangar.components.query.QueryBuilder.getActiveQueryBuilder;
import static io.papermc.hangar.components.query.QueryBuilder.newQueryBuilder;

public final class QueryHelper {

    public static final Object EMPTY = new Object();
    public static final List<Object> EMPTY_LIST = List.of(EMPTY);

    private QueryHelper() {
    }

    public static List<Object> query(final DataFetchingEnvironment environment, final String rootTable) {
        return query(environment, rootTable, "");
    }

    public static List<Object> query(final DataFetchingEnvironment environment, final String rootTable, final String condition) {
        final QueryBuilder queryBuilder = newQueryBuilder(environment.getGraphQlContext());
        queryBuilder.variables = environment.getExecutionStepInfo().getArguments();
        queryBuilder.rootTable = environment.getExecutionStepInfo().getPath().getSegmentName();
        queryBuilder.from = STR."FROM \{rootTable} \{queryBuilder.rootTable}";
        queryBuilder.condition = condition;
        return EMPTY_LIST;
    }

    public static List<Object> join(final DataFetchingEnvironment environment, final String table, final String alias, final String fieldA, final String fieldB) {
        return join(environment, table, alias, fieldA, fieldB, null);
    }

    public static List<Object> join(final DataFetchingEnvironment environment, final String table, final String alias, final String fieldA, final String fieldB, final String secondTable) {
        final QueryBuilder queryBuilder = getActiveQueryBuilder(environment.getGraphQlContext());
        final String parentTable = secondTable == null ? PrefixUtil.getParentTable(environment.getExecutionStepInfo(), queryBuilder) : secondTable;
        final String parentAlias = PrefixUtil.getParentAlias(environment.getExecutionStepInfo(), queryBuilder);
        queryBuilder.joins.add(STR."LEFT JOIN \{table} \{parentAlias}\{alias} ON \{parentAlias}\{alias}.\{fieldA} = \{parentTable}\{fieldB}");
        return EMPTY_LIST;
    }

    public static void selectField(final DataFetchingEnvironment environment, final String tableSuffix, final String qglField, final String dbField, final String resultField) {
        if (environment.getSelectionSet().contains(qglField)) {
            final QueryBuilder queryBuilder = getActiveQueryBuilder(environment.getGraphQlContext());
            final String parentAlias = PrefixUtil.getParentAlias(environment.getExecutionStepInfo(), queryBuilder);
            queryBuilder.fields.add(STR."\{parentAlias.substring(0, parentAlias.length() - 1)}\{tableSuffix}.\{dbField} AS \{parentAlias}\{resultField}");
        }
    }

    public static Object avatarUrl(final DataFetchingEnvironment environment, final FileService fileService, final AvatarService avatarService, final String avatarType) {
        final String idVar = avatarType.equals(AvatarService.USER) ? "userid" : "projectid";
        final String idField = avatarType.equals(AvatarService.USER) ? "uuid" : "id";

        final QueryBuilder queryBuilder = getActiveQueryBuilder(environment.getGraphQlContext());
        final String parentTable = PrefixUtil.getParentTable(environment.getExecutionStepInfo(), queryBuilder);
        final String parentAlias = PrefixUtil.getParentAlias(environment.getExecutionStepInfo(), queryBuilder);

        final String avatarVersion = STR."ext_\{parentAlias.replace("_", "")}avatarversion";
        final String id = STR."ext_\{parentAlias.replace("_", "")}\{idVar}";
        queryBuilder.fields.add(STR."\{parentAlias}avatar.version AS \{avatarVersion}");
        queryBuilder.fields.add(STR."\{parentTable}\{idField} AS \{id}");

        queryBuilder.joins.add(STR."LEFT JOIN avatars \{parentAlias}avatar ON \{parentAlias}avatar.type = '\{avatarType}' AND \{parentAlias}avatar.subject = \{parentTable}\{idField}::varchar");
        queryBuilder.resolver.put(parentAlias + environment.getExecutionStepInfo().getPath().getSegmentName(), (r) -> {
            // TODO for projects we need to call up to the owner and get the avatar from there? or should we handle that in frontend?
            if (r.get(avatarVersion) == null) {
                return avatarService.getDefaultAvatarUrl();
            }
            return fileService.getAvatarUrl(avatarType, String.valueOf(r.get(id)), r.get(avatarVersion));
        });
        return EMPTY;
    }
}
@@ -1,101 +0,0 @@
package io.papermc.hangar.components.query;

import graphql.schema.DataFetchingEnvironment;
import io.papermc.hangar.components.images.service.AvatarService;
import io.papermc.hangar.service.internal.file.FileService;
import org.springframework.boot.autoconfigure.condition.ConditionalOnProperty;
import org.springframework.graphql.data.method.annotation.QueryMapping;
import org.springframework.graphql.data.method.annotation.SchemaMapping;
import org.springframework.stereotype.Controller;

import static io.papermc.hangar.components.query.QueryBuilder.getActiveQueryBuilder;
import static io.papermc.hangar.components.query.QueryHelper.EMPTY;
import static io.papermc.hangar.components.query.QueryHelper.avatarUrl;
import static io.papermc.hangar.components.query.QueryHelper.join;
import static io.papermc.hangar.components.query.QueryHelper.query;
import static io.papermc.hangar.components.query.QueryHelper.selectField;

@Controller
@ConditionalOnProperty("enable-graphql")
public class QueryMappings {

    private final FileService fileService;
    private final AvatarService avatarService;

    public QueryMappings(final FileService fileService, final AvatarService avatarService) {
        this.fileService = fileService;
        this.avatarService = avatarService;
    }

    // queries
    @QueryMapping
    public Object projectBySlug(final DataFetchingEnvironment environment) {
        final String segmentName = environment.getExecutionStepInfo().getPath().getSegmentName();
        return query(environment, "projects", STR."WHERE \{segmentName}.slug = :slug");
    }

    @QueryMapping
    public Object projects(final DataFetchingEnvironment environment) {
        return query(environment, "projects");
    }

    @QueryMapping
    public Object users(final DataFetchingEnvironment environment) {
        return query(environment, "users");
    }

    // joins
    @SchemaMapping(typeName = "User", field = "projects")
    public Object userProjects(final DataFetchingEnvironment environment) {
        return join(environment, "projects", "projects", "owner_id", "id");
    }

    @SchemaMapping(typeName = "Project", field = "owner")
    public Object projectOwner(final DataFetchingEnvironment environment) {
        return join(environment, "users", "owner", "id", "owner_id");
    }

    @SchemaMapping(typeName = "Project", field = "pages")
    public Object projectPages(final DataFetchingEnvironment environment) {
        return join(environment, "project_pages", "pages", "project_id", "id");
    }

    // special schemas
    @SchemaMapping(typeName = "Project", field = "avatarUrl")
    public Object projectAvatarUrl(final DataFetchingEnvironment environment) {
        return avatarUrl(environment, this.fileService, this.avatarService, AvatarService.PROJECT);
    }

    @SchemaMapping(typeName = "User", field = "avatarUrl")
    public Object userUrl(final DataFetchingEnvironment environment) {
        return avatarUrl(environment, this.fileService, this.avatarService, AvatarService.USER);
    }

    @SchemaMapping(typeName = "Project", field = "namespace")
    public Object projectNamespace(final DataFetchingEnvironment environment) {
        selectField(environment, "", "owner", "owner_name", "namespace_owner");
        selectField(environment, "", "slug", "slug", "namespace_slug");
        return null; // no need to dig deeper
    }

    @SchemaMapping(typeName = "Project", field = "homepage")
    public Object projectHomepage(final DataFetchingEnvironment environment) {
        final QueryBuilder queryBuilder = getActiveQueryBuilder(environment.getGraphQlContext());
        final String parentAlias = PrefixUtil.getParentAlias(environment.getExecutionStepInfo().getParent(), queryBuilder);
        join(environment, "project_home_pages", "homepage_id", "project_id", "id");
        join(environment, "project_pages", "homepage", "id", "page_id", parentAlias + "homepage_id.");
        return EMPTY;
    }

    @SchemaMapping(typeName = "Project", field = "stats")
    public Object projectStats(final DataFetchingEnvironment environment) {
        join(environment, "home_projects", "extra", "id", "id");
        selectField(environment, "_extra", "stars", "stars", "stats_stars");
        selectField(environment, "_extra", "watchers", "watchers", "stats_watchers");
        selectField(environment, "_extra", "views", "views", "stats_views");
        selectField(environment, "_extra", "downloads", "downloads", "stats_downloads");
        selectField(environment, "_extra", "recentViews", "recent_views", "stats_recentViews");
        selectField(environment, "_extra", "recentDownloads", "recent_downloads", "stats_recentDownloads");
        return null; // no need to dig deeper
    }
}
@@ -1,172 +0,0 @@
package io.papermc.hangar.components.query;

import graphql.schema.GraphQLFieldDefinition;
import graphql.schema.GraphQLList;
import graphql.schema.GraphQLObjectType;
import graphql.schema.GraphQLOutputType;
import graphql.schema.GraphQLScalarType;
import graphql.schema.GraphQLSchema;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;

@SuppressWarnings("UseOfSystemOutOrSystemErr")
public class QueryMerger {

    private final GraphQLSchema schema;

    public QueryMerger(final GraphQLSchema schema) {
        this.schema = schema;
    }

    public Map<String, Object> merge(final List<Map<String, String>> input) {
        final Map<String, Object> result = new HashMap<>();
        try {
            this.merge(input, result, "query", "");
        } catch (final Exception e) {
            System.out.println("error while merging");
            e.printStackTrace();
        }
        return result;
    }

    private void merge(final List<Map<String, String>> input, final Map<String, Object> result, final String parentKey, final String logPrefix) {
        System.out.println(logPrefix + "input: " + input);
        System.out.println(logPrefix + "result: " + result);

        // find common sub-keys in input
        boolean foundOneSub = false;
        final Set<String> commonKeys = new HashSet<>();
        for (final String key : input.getFirst().keySet()) {
            String shortenedKey = key;
            if (key.contains("_")) {
                foundOneSub = true;
                shortenedKey = key.substring(0, key.indexOf("_"));
            }
            commonKeys.add(shortenedKey);
        }

        // check if we really need to go one level deeper
        if (!foundOneSub) {
            System.out.println(logPrefix + "no sub keys found");
            for (final String key : input.getFirst().keySet()) {
                result.put(key, input.getFirst().get(key));
            }
            return;
        }

        System.out.println(logPrefix + "commonKeys: " + commonKeys);

        // create structure
        for (final String commonKey : commonKeys) {
            final Map<String, Map<String, Object>> current = new HashMap<>();
            result.put(commonKey, current);

            System.out.println(logPrefix + "check common key " + commonKey);

            final Map<String, List<Map<String, String>>> newInputs = new HashMap<>();

            // find primary keys
            for (final Map<String, String> row : input) {
                final Map<String, String> others = new HashMap<>();
                Map<String, Object> newResult;
                String pkValue = null;
                for (final String key : row.keySet()) {
                    if (!key.startsWith(commonKey)) continue;
                    final String pk = "name"; // TODO generic
                    if (key.equals(commonKey + "_" + pk)) {
                        pkValue = row.get(key);
                        System.out.println(logPrefix + "primary key: " + pkValue);
                        newResult = current.computeIfAbsent(pkValue, dum -> new HashMap<>());
                        newResult.put(pk, row.get(key));
                    } else {
                        final String shortenedKey = key.replaceFirst(commonKey + "_", "");
                        System.out.println(logPrefix + "not primary key: " + shortenedKey);
                        others.put(shortenedKey, row.get(key));
                    }
                }

                if (pkValue == null) {
                    System.out.println(logPrefix + "no primary key found: " + others);
                    result.put(commonKey, others);
                } else {
                    System.out.println(logPrefix + "others: " + others);
                    newInputs.computeIfAbsent(pkValue, dum -> new ArrayList<>()).add(others);
                }
            }

            System.out.println(logPrefix + "new inputs: " + newInputs);

            for (final String key : newInputs.keySet()) {
                System.out.println(logPrefix + " recurse: " + key);
                this.merge(newInputs.get(key), current.get(key), parentKey + "_" + commonKey, logPrefix + " ");
            }
        }

        // flatten map<primary key, values> to list<values>
        for (final String key : result.keySet()) {
            final Object entry = result.get(key);
            if (entry instanceof final Map map) {
                final GraphQLFieldDefinition fieldDefinition = this.getFieldDefinition(parentKey + "_" + key);
                if (fieldDefinition.getType() instanceof GraphQLList) {
                    // lists get flattened
                    result.put(key, map.values());
                } else if (fieldDefinition.getType() instanceof final GraphQLObjectType objectType) {
                    // virtual objects stay as map
                    if (objectType.getFieldDefinition("_virtual") == null) {
                        // normal objects should be a single value
                        result.put(key, map.values().stream().findFirst().orElseThrow());
                    }
                } else if (fieldDefinition.getType() instanceof GraphQLScalarType) {
                    // just get the scalar
                    result.put(key, map.get(key));
                } else {
                    throw new RuntimeException("should never reach " + parentKey + "_" + key);
                }
            }
        }

        System.out.println(logPrefix + "result: " + result);
    }

    private GraphQLFieldDefinition getFieldDefinition(final String key) {
        final String[] parts = key.split("_");
        GraphQLOutputType type = this.schema.getQueryType();
        GraphQLFieldDefinition queryFieldDefinition = null;
        outer:
        for (int i = 1; i < parts.length; i++) {
            if (type instanceof final GraphQLObjectType objectType) {
                for (final GraphQLFieldDefinition fieldDefinition : objectType.getFieldDefinitions()) {
                    if (fieldDefinition.getName().equalsIgnoreCase(parts[i])) {
                        queryFieldDefinition = fieldDefinition;
                        type = queryFieldDefinition.getType();
                        continue outer;
                    }
                }
                System.out.println("no field found: " + parts[i]);
            } else if (type instanceof final GraphQLList list) {
                var newType = list.getWrappedType();
                if (newType instanceof final GraphQLObjectType objectType) {
                    for (final GraphQLFieldDefinition fieldDefinition : objectType.getFieldDefinitions()) {
                        if (fieldDefinition.getName().equalsIgnoreCase(parts[i])) {
                            queryFieldDefinition = fieldDefinition;
                            type = queryFieldDefinition.getType();
                            continue outer;
                        }
                    }
                    System.out.println("no list field found: " + parts[i]);
                    return GraphQLFieldDefinition.newFieldDefinition().name("Dummy").type(GraphQLObjectType.newObject().name("Dummy")).build();
                } else {
                    System.out.println("unknown list type: " + type);
                }
            } else {
                System.out.println("unknown type: " + type);
            }
        }
        return queryFieldDefinition;
    }

}
@@ -41,10 +41,6 @@ spring:
secrets:
  enableApi: true

graphql:
  graphiql:
    enabled: true

# Enable this if you want to regenerate the frontend types
#springdoc:
#  use-fqn: true
@@ -262,7 +258,6 @@ logging:
io.papermc.hangar.service.internal.JobService: DEBUG
http-client-logger: INFO
io.papermc.hangar.service.ReplicationService: DEBUG
# graphql: TRACE
# org.springframework.cache: TRACE
# org.springframework.beans: TRACE
# io.papermc.hangar.security: TRACE
@@ -1,68 +0,0 @@
type Query {
  projectBySlug(slug: String!): Project
  projects: [Project]
  users: [User]
}

type Project {
  createdAt: String
  id: Int
  name: String
  namespace: ProjectNamespace
  stats: ProjectStats
  category: String
  lastUpdated: String
  visibility: String
  avatarUrl: String
  owner: User
  homepage: ProjectPage
  pages: [ProjectPage]
}

type User {
  id: Int
  uuid: String
  createdAt: String
  name: String
  email: String
  tagline: String
  read_prompts: [Int]
  locked: Boolean
  language: String
  theme: String
  email_verified: Boolean
  social: String
  avatarUrl: String
  projects: [Project]
}

type ProjectPage {
  id: Int
  createdAt: String
  project: Project
  name: String
  slug: String
  contents: String
  deletable: Boolean
  parent: ProjectPage
}

type ProjectStats implements Virtual {
  _virtual: Boolean
  views: Int
  downloads: Int
  recentViews: Int
  recentDownloads: Int
  stars: Int
  watchers: Int
}

type ProjectNamespace implements Virtual {
  _virtual: Boolean
  owner: String
  slug: String
}

interface Virtual {
  _virtual: Boolean
}
@@ -1,367 +0,0 @@
package io.papermc.hangar.components.query;

import com.fasterxml.jackson.core.JsonProcessingException;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.databind.ObjectWriter;
import com.fasterxml.jackson.databind.SerializationFeature;
import graphql.schema.idl.SchemaGenerator;
import java.io.IOException;
import java.io.InputStream;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import org.junit.jupiter.api.BeforeAll;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.condition.DisabledIfEnvironmentVariable;

import static org.junit.jupiter.api.Assertions.*;

class QueryMergerTest {

    private static QueryMerger merger;

    @BeforeAll
    static void setup() {
        try (final InputStream resourceAsStream = QueryMergerTest.class.getClassLoader().getResourceAsStream("graphql/schema.graphqls")) {
            assert resourceAsStream != null;
            merger = new QueryMerger(SchemaGenerator.createdMockedSchema(new String(resourceAsStream.readAllBytes())));
        } catch (final IOException e) {
            throw new RuntimeException(e);
        }
    }

    @Test
    void mergeOne() throws JsonProcessingException {
        final List<Map<String, String>> input = new ArrayList<>();
        input.add(Map.of(
            "projects_name", "Test2",
            "projects_dum", "dum",
            "projects_pages_name", "Resource Page",
            "projects_homepage_name", "Resource Page",
            "projects_pages_contents", "# Test2 Welcome to your new project!",
            "projects_homepage_contents", "# Test2 Welcome to your new project!"
        ));
        final Map<String, Object> expected = Map.of(
            "projects", List.of(
                Map.of(
                    "dum", "dum",
                    "name", "Test2",
                    "pages", List.of(
                        Map.of("name", "Resource Page", "contents", "# Test2 Welcome to your new project!")
                    ),
                    "homepage", Map.of(
                        "name", "Resource Page",
                        "contents", "# Test2 Welcome to your new project!"
                    )
                )
            )
        );

        compare(expected, merger.merge(input));
    }

    @Test
    void mergeHomepage() throws JsonProcessingException {
        final List<Map<String, String>> input = new ArrayList<>();
        input.add(Map.of(
            "projects_name", "Test2",
            "projects_homepage_name", "Resource Page"
        ));
        final Map<String, Object> expected = Map.of(
            "projects", List.of(
                Map.of(
                    "name", "Test2",
                    "homepage", Map.of(
                        "name", "Resource Page"
                    )
                )
            )
        );

        compare(expected, merger.merge(input));
    }

    @Test
    void mergeTwoChild() throws JsonProcessingException {
        final List<Map<String, String>> input = new ArrayList<>();
        input.add(Map.of(
            "projects_name", "Test",
            "projects_pages_name", "Test",
            "projects_homepage_name", "Resource Page",
            "projects_pages_contents", "# Test Welcome to your new page",
            "projects_homepage_contents", "# Test Welcome to your new project!"
        ));
        input.add(Map.of(
            "projects_name", "Test",
            "projects_pages_name", "Resource Page",
            "projects_homepage_name", "Resource Page",
            "projects_pages_contents", "# Test Welcome to your new project!",
            "projects_homepage_contents", "# Test Welcome to your new project!"
        ));

        final Map<String, Object> expected = Map.of(
            "projects", List.of(
                Map.of(
                    "name", "Test",
                    "pages", List.of(
                        Map.of("name", "Test", "contents", "# Test Welcome to your new page"),
                        Map.of("name", "Resource Page", "contents", "# Test Welcome to your new project!")
                    ),
                    "homepage", Map.of(
                        "name", "Resource Page",
                        "contents", "# Test Welcome to your new project!"
                    )
                )
            )
        );

        compare(expected, merger.merge(input));
    }

    @Test
    void mergeTwoParent() throws JsonProcessingException {
        final List<Map<String, String>> input = new ArrayList<>();
        input.add(Map.of(
            "projects_name", "Test",
            "projects_pages_name", "Test",
            "projects_homepage_name", "Resource Page",
            "projects_pages_contents", "# Test Welcome to your new page",
            "projects_homepage_contents", "# Test Welcome to your new project!"
        ));
        input.add(Map.of(
            "projects_name", "Test",
            "projects_pages_name", "Resource Page",
            "projects_homepage_name", "Resource Page",
            "projects_pages_contents", "# Test Welcome to your new project!",
            "projects_homepage_contents", "# Test Welcome to your new project!"
        ));
        input.add(Map.of(
            "projects_name", "Test2",
            "projects_pages_name", "Resource Page",
            "projects_homepage_name", "Resource Page",
            "projects_pages_contents", "# Test2 Welcome to your new project!",
            "projects_homepage_contents", "# Test2 Welcome to your new project!"
        ));

        final Map<String, Object> expected = Map.of(
            "projects", List.of(
                Map.of(
                    "name", "Test",
                    "pages", List.of(
                        Map.of("name", "Test", "contents", "# Test Welcome to your new page"),
                        Map.of("name", "Resource Page", "contents", "# Test Welcome to your new project!")
                    ),
                    "homepage", Map.of(
                        "name", "Resource Page",
                        "contents", "# Test Welcome to your new project!"
                    )
                ),
                Map.of(
                    "name", "Test2",
                    "pages", List.of(
                        Map.of("name", "Resource Page", "contents", "# Test2 Welcome to your new project!")
                    ),
                    "homepage", Map.of(
                        "name", "Resource Page",
                        "contents", "# Test2 Welcome to your new project!"
                    )
                )
            )
        );

        compare(expected, merger.merge(input));
    }

    @Test
    void mergeNoPrimaryKeyNamespace() throws JsonProcessingException {
        final List<Map<String, String>> input = new ArrayList<>();
        input.add(Map.of(
            "projectbyslug_name", "Test",
            "projectbyslug_namespace_slug", "Test",
            "projectbyslug_namespace_owner", "MiniDigger"
        ));

        final Map<String, Object> expected = Map.of(
            "projectbyslug", Map.of(
                "name", "Test",
                "namespace", Map.of(
                    "owner", "MiniDigger",
                    "slug", "Test"
                )
            )
        );

        compare(expected, merger.merge(input));
    }

    // TODO solve these by always adding PKs to query on join
    @Test
    @DisabledIfEnvironmentVariable(named = "CI", matches = "true")
    void mergeNoPrimaryKey() throws JsonProcessingException {
        final List<Map<String, String>> input = new ArrayList<>();
        input.add(Map.of("projects_id", "1"));
        input.add(Map.of("projects_id", "2"));

        final Map<String, Object> expected = Map.of(
            "projects", List.of(
                Map.of("id", "1"),
                Map.of("id", "2")
            )
        );

        compare(expected, merger.merge(input));
    }

    @Test
    @DisabledIfEnvironmentVariable(named = "CI", matches = "true")
    void mergeNoPrimaryKey2() throws JsonProcessingException {
        final List<Map<String, String>> input = new ArrayList<>();
        input.add(Map.of("projects_id", "1", "projects_owner_name", "MiniDigger"));
        input.add(Map.of("projects_id", "2", "projects_owner_name", "MiniDigger"));

        final Map<String, Object> expected = Map.of(
            "projects", List.of(
                Map.of("id", "1", "owner", Map.of("name", "MiniDigger")),
                Map.of("id", "2", "owner", Map.of("name", "MiniDigger"))
            )
        );

        compare(expected, merger.merge(input));
    }

    @Test
    @DisabledIfEnvironmentVariable(named = "CI", matches = "true")
    void mergeNoPrimaryKey3() throws JsonProcessingException {
        final List<Map<String, String>> input = new ArrayList<>();
        input.add(Map.of("projects_name", "Test", "projects_owner_email", "Dum", "projects_owner_id", "1"));
        input.add(Map.of("projects_name", "Test2", "projects_owner_email", "Dum", "projects_owner_id", "1"));

        final Map<String, Object> expected = Map.of(
            "projects", List.of(
                Map.of("name", "Test", "owner", Map.of("email", "Dum", "id", "1")),
                Map.of("name", "Test2", "owner", Map.of("email", "Dum", "id", "1"))
            )
        );

        compare(expected, merger.merge(input));
    }

    @Test
    @DisabledIfEnvironmentVariable(named = "CI", matches = "true")
    void mergeEmptyProjects() throws JsonProcessingException {
        final List<Map<String, String>> input = new ArrayList<>();
        final Map<String, String> thing = new HashMap<>();
        thing.put("users_name", "JarScanner");
        thing.put("users_projects_name", null);
        thing.put("users_projects_stats_stars", null);
        input.add(thing);

        final Map<String, Object> expected = Map.of(
            "users", List.of(Map.of(
                "projects", List.of(),
                "name", "JarScanner"
            )));

        compare(expected, merger.merge(input));
    }

    @Test
    void mergeDeep() throws JsonProcessingException {
        final List<Map<String, String>> input = new ArrayList<>();
        input.add(Map.of(
            "projects_name", "Test",
            "projects_owner_name", "MiniDigger",
            "projects_owner_projects_name", "Test"
        ));
        input.add(Map.of(
            "projects_name", "Test",
            "projects_owner_name", "MiniDigger",
            "projects_owner_projects_name", "Test2"
        ));
        input.add(Map.of(
            "projects_name", "Test2",
            "projects_owner_name", "MiniDigger",
            "projects_owner_projects_name", "Test"
        ));
        input.add(Map.of(
            "projects_name", "Test2",
            "projects_owner_name", "MiniDigger",
            "projects_owner_projects_name", "Test2"
        ));

        final Map<String, Object> expected = Map.of(
            "projects", List.of(
                Map.of("name", "Test", "owner", Map.of("name", "MiniDigger", "projects", List.of(
                    Map.of("name", "Test"),
                    Map.of("name", "Test2")
                ))),
                Map.of("name", "Test2", "owner", Map.of("name", "MiniDigger", "projects", List.of(
                    Map.of("name", "Test"),
                    Map.of("name", "Test2")
                )))
            )
        );

        compare(expected, merger.merge(input));
    }

    @Test
    void mergeList() throws JsonProcessingException {
        final List<Map<String, String>> input = new ArrayList<>();
        input.add(Map.of(
            "users_name", "MiniDigger"
        ));
        input.add(Map.of(
            "users_name", "JarScanner"
        ));

        final Map<String, Object> expected = Map.of(
            "users", List.of(
                Map.of("name", "MiniDigger"),
                Map.of("name", "JarScanner")
            )
        );

        compare(expected, merger.merge(input));
    }

    @Test
    void mergeScalarLeftJoin() throws JsonProcessingException {
        final List<Map<String, String>> input = new ArrayList<>();
        final Map<String, String> dum = new HashMap<>();
        dum.put("users_projects_name", null);
        dum.put("users_name", "JarScanner");
        dum.put("users_id", "1");
        input.add(dum);
        input.add(Map.of(
            "users_projects_name", "Test",
            "users_name", "MiniDigger",
            "users_id", "2"
        ));
        input.add(Map.of(
            "users_projects_name", "Test2",
            "users_name", "MiniDigger",
            "users_id", "2"
        ));

        final Map<String, Object> expected = Map.of(
            "users", List.of(
                Map.of("name", "MiniDigger", "id", "2", "projects", List.of(Map.of("name", "Test"), Map.of("name", "Test2"))),
                Map.of("name", "JarScanner", "id", "1", "projects", List.of())
            )
        );

        compare(expected, merger.merge(input));
    }

    private static void compare(final Map<String, Object> expected, final Map<String, Object> output) throws JsonProcessingException {
        final ObjectMapper objectMapper = new ObjectMapper();
        objectMapper.configure(SerializationFeature.ORDER_MAP_ENTRIES_BY_KEYS, true);

        final ObjectWriter objectWriter = objectMapper.writerWithDefaultPrettyPrinter();
        objectWriter.writeValueAsString(expected);

        assertEquals(objectWriter.writeValueAsString(expected), objectWriter.writeValueAsString(output));
    }
}