
0.0.3

babyfish-ct committed Mar 26, 2022
1 parent c2a8f16 commit b8b554d

Showing 4 changed files with 22 additions and 7 deletions.

example/build.gradle.kts (2 changes: 1 addition & 1 deletion)

@@ -16,7 +16,7 @@ repositories {
}

dependencies {
implementation("org.babyfish.graphql.provider:graphql-provider-starter-dgs:0.0.2")
implementation("org.babyfish.graphql.provider:graphql-provider-starter-dgs:0.0.3")
ksp("org.babyfish.kimmer:kimmer-ksp:0.3.0")
runtimeOnly("io.r2dbc:r2dbc-h2:0.8.5.RELEASE")
}

project/build.gradle.kts (2 changes: 1 addition & 1 deletion)

@@ -1,4 +1,4 @@
allprojects {
group = "org.babyfish.graphql.provider"
version = "0.0.2"
version = "0.0.3"
}

@@ -79,15 +79,17 @@ open class DataFetchers(
}.toFuture() as CompletableFuture<Any?>
}
val entity = env.getSource<Entity<*>>()
+val idOnly = env.selectionSet.fields.let {
+    it.size == 1 && it[0].name == "id"
+}
if (prop.isReference && prop.storage is Column && Immutable.isLoaded(entity, prop.immutableProp)) {
val parent = Immutable.get(entity, prop.immutableProp) as Entity<*>?
if (parent === null) {
return CompletableFuture.completedFuture(null)
}
val parentId = Immutable.get(parent, prop.targetType!!.idProp.immutableProp)
if (env.arguments.isEmpty()) {
-val fields = env.selectionSet.fields
-if (fields.size == 1 && fields[0].name == "id") {
+if (idOnly) {
return CompletableFuture.completedFuture(
produce(prop.targetType!!.kotlinType) {
Draft.set(this, prop.targetType!!.idProp.immutableProp, parentId)
@@ -97,7 +99,7 @@
}
return env.loaderByParentId(prop).load(parentId)
} else {
-val future = env.loaderById(prop).load(entity.id)
+val future = env.loaderById(prop, idOnly).load(entity.id)
if (prop.isReference) {
return future.thenApply { it.firstOrNull() }
}
@@ -175,7 +177,7 @@
}
}

-private fun DataFetchingEnvironment.loaderById(prop: ModelProp): DataLoader<Any, List<Any>> {
+private fun DataFetchingEnvironment.loaderById(prop: ModelProp, idOnly: Boolean): DataLoader<Any, List<Any>> {
val dataLoaderKey = "graphql-provider:loader-by-id:${prop}"
return dataLoaderRegistry.computeIfAbsent(dataLoaderKey) {
DataLoaderFactory.newMappedDataLoader(
@@ -185,7 +187,7 @@
applyFilter(prop, it)
}
else ->
-ManyToManyBatchLoader(r2dbcClient, prop) {
+ManyToManyBatchLoader(r2dbcClient, prop, idOnly) {
applyFilter(prop, it)
}
},
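
The DataFetchers change above computes an idOnly flag once from the GraphQL selection set and threads it through loaderById into the many-to-many batch loader, so an association that is queried only for its id can be answered without touching the target table. Below is a minimal standalone sketch of that decision, not the project's actual classes; BookStub and the loadFromDb parameter are illustrative placeholders.

```kotlin
import graphql.schema.DataFetchingEnvironment

// Illustrative stand-in for a kimmer entity; only the id matters on the fast path.
data class BookStub(val id: Long, val name: String? = null)

// Mirrors the new `idOnly` check: true when the client selected nothing but `id`.
fun isIdOnly(env: DataFetchingEnvironment): Boolean =
    env.selectionSet.fields.let { it.size == 1 && it[0].name == "id" }

// Sketch of the fetcher's decision: skip the database when only ids are needed.
fun fetchBooks(
    env: DataFetchingEnvironment,
    bookIds: List<Long>,
    loadFromDb: (List<Long>) -> List<BookStub>  // stands in for the real batch loader
): List<BookStub> =
    if (isIdOnly(env)) {
        bookIds.map { BookStub(it) }  // ids are already known, no SQL issued
    } else {
        loadFromDb(bookIds)
    }
```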

@@ -10,6 +10,8 @@ import org.babyfish.graphql.provider.meta.ModelProp
import org.babyfish.graphql.provider.runtime.ArgumentsConverter
import org.babyfish.graphql.provider.runtime.FakeID
import org.babyfish.graphql.provider.runtime.R2dbcClient
+import org.babyfish.kimmer.Draft
+import org.babyfish.kimmer.produce
import org.babyfish.kimmer.sql.Entity
import org.babyfish.kimmer.sql.ast.query.MutableRootQuery
import org.babyfish.kimmer.sql.ast.valueIn
@@ -22,6 +24,7 @@ import kotlin.reflect.KClass
internal class ManyToManyBatchLoader(
private val r2dbcClient: R2dbcClient,
private val prop: ModelProp,
+private val idOnly: Boolean,
private val filterApplier: (MutableRootQuery<Entity<FakeID>, FakeID>) -> Unit
) : MappedBatchLoader<Any, List<Any>> {

@@ -40,6 +43,16 @@ internal class ManyToManyBatchLoader(
val idMap = pairs.groupBy({it.first!!}) {
it.second!!
}
if (idOnly && prop.filter === null) {
val targetType = prop.targetType!!
return idMap.mapValues { entry ->
entry.value.map {
produce(targetType.kotlinType) {
Draft.set(this, targetType.idProp.immutableProp, it)
}
}
}
}
val allTargetIds = pairs.map { it.second }.distinct()
val rows =
r2dbcClient.query(
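
The ManyToManyBatchLoader change is the other half of the optimization: when idOnly is set and the association has no configured filter, the loader already holds every target id from the middle-table rows, so it materializes id-only drafts instead of issuing a second query against the target table. Here is a simplified sketch of that grouping and fast path, using a plain data class in place of kimmer drafts; TargetStub and the pair list are illustrative only.

```kotlin
// Stand-in for an id-only kimmer draft; only the id is populated on the fast path.
data class TargetStub(val id: Long)

// `pairs` models the (sourceId, targetId) rows read from the many-to-many middle table.
fun loadIdOnly(
    sourceIds: Collection<Long>,
    pairs: List<Pair<Long, Long>>
): Map<Long, List<TargetStub>> {
    // Group target ids by source id, like the `idMap` built in the batch loader.
    val idMap = pairs.groupBy({ it.first }) { it.second }
    // Fast path: the target ids are already in hand, so build id-only stubs
    // directly instead of querying the target table a second time.
    return sourceIds.associateWith { sourceId ->
        (idMap[sourceId] ?: emptyList()).map { TargetStub(it) }
    }
}
```

The prop.filter === null guard matters here: a user-defined filter could exclude some of those target rows, so the shortcut is only taken when no filter is configured for the association.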
