Skip to content

Commit

Permalink
Dataloader support (#130)
Browse files Browse the repository at this point in the history
  • Loading branch information
DmitryTsepelev authored Jan 11, 2025
1 parent 91704e9 commit 743b542
Show file tree
Hide file tree
Showing 8 changed files with 128 additions and 16 deletions.
1 change: 1 addition & 0 deletions CHANGELOG.md
Original file line number Diff line number Diff line change
Expand Up @@ -2,6 +2,7 @@

## master

- [PR#130](https://github.com/DmitryTsepelev/graphql-ruby-fragment_cache/pull/130) Dataloader support ([@DmitryTsepelev][])
- [PR#125](https://github.com/DmitryTsepelev/graphql-ruby-fragment_cache/pull/125) Introduce cache lookup instrumentation hook ([@danielhartnell][])

## 1.20.5 (2024-11-02)
Expand Down
28 changes: 28 additions & 0 deletions README.md
Original file line number Diff line number Diff line change
Expand Up @@ -381,6 +381,34 @@ class QueryType < BaseObject
end
```

## Dataloader

If you are using [Dataloader](https://graphql-ruby.org/dataloader/overview.html), you will need to let the gem know using `dataloader: true`:

```ruby
class PostType < BaseObject
field :author, User, null: false

def author
cache_fragment(dataloader: true) do
dataloader.with(AuthorDataloaderSource).load(object.id)
end
end
end

# or

class PostType < BaseObject
field :author, User, null: false, cache_fragment: {dataloader: true}

def author
dataloader.with(AuthorDataloaderSource).load(object.id)
end
end
```

One caveat: there is no reliable way to detect that the Dataloader (and, therefore, a Fiber) is in use, so with `dataloader: true` the block is forced to resolve eagerly on a cache miss — the N+1 is instead handled (batched) inside the Dataloader Source class.

## How to use `#cache_fragment` in extensions (and other places where context is not available)

If you want to call `#cache_fragment` from places other than fields or resolvers, you'll need to pass `context` explicitly and turn on `raw_value` support. For instance, let's take a look at this extension:
Expand Down
9 changes: 3 additions & 6 deletions lib/graphql/fragment_cache/fragment.rb
Original file line number Diff line number Diff line change
Expand Up @@ -16,12 +16,10 @@ def read_multi(fragments)
return fragments.map { |f| [f, f.read] }.to_h
end

fragments_to_cache_keys = fragments
.map { |f| [f, f.cache_key] }.to_h
fragments_to_cache_keys = fragments.map { |f| [f, f.cache_key] }.to_h

# Filter out all the cache_keys for fragments with renew_cache: true in their context
cache_keys = fragments_to_cache_keys
.reject { |k, _v| k.context[:renew_cache] == true }.values
cache_keys = fragments_to_cache_keys.reject { |k, _v| k.context[:renew_cache] == true }.values

# If there are cache_keys, look up values with read_multi; otherwise return an empty hash
cache_keys_to_values = if cache_keys.empty?
Expand All @@ -46,8 +44,7 @@ def read_multi(fragments)
end

# Fragments without values, or with renew_cache: true in their context, will have nil values, like the read method
fragments_to_cache_keys
.map { |fragment, cache_key| [fragment, cache_keys_to_values[cache_key]] }.to_h
fragments_to_cache_keys.map { |fragment, cache_key| [fragment, cache_keys_to_values[cache_key]] }.to_h
end
end

Expand Down
11 changes: 11 additions & 0 deletions lib/graphql/fragment_cache/schema/lazy_cache_resolver.rb
Original file line number Diff line number Diff line change
Expand Up @@ -16,6 +16,8 @@ def initialize(fragment, query_ctx, object_to_cache, &block)
@block = block

@lazy_state[:pending_fragments] << @fragment

ensure_dataloader_resulution! if @fragment.options[:dataloader]
end

def resolve
Expand All @@ -35,6 +37,15 @@ def resolve
@query_ctx.fragments << @fragment
end
end

private

def ensure_dataloader_resulution!
# NOTE(review): the method name misspells "resolution" — rename it together
# with its caller in `initialize` in a follow-up; renaming only here breaks it.
#
# Per the README, there is no way to detect that the Dataloader (a Fiber) is
# in use, so on a cache miss the block must be resolved eagerly now rather
# than during lazy resolution. Drop the block afterwards so it is not
# resolved a second time.
cache_hit = FragmentCache.cache_store.exist?(@fragment.cache_key)
return if cache_hit

@object_to_cache = @block.call
@block = nil
end
end
end
end
Expand Down
56 changes: 56 additions & 0 deletions spec/graphql/fragment_cache/object_helpers_spec.rb
Original file line number Diff line number Diff line change
Expand Up @@ -777,6 +777,62 @@ def post(id:, expires_in: nil)
end
end

# Integration spec for the `dataloader: true` caching path: authors are loaded
# through a GraphQL::Dataloader source, cached on the first query, and served
# from the cache afterwards (stale model changes must not leak through).
describe "caching fields with dataloader" do
let(:query) do
<<~GQL
query GetPosts {
posts {
id
dataloaderCachedAuthor {
name
}
}
}
GQL
end

# Schema must opt in to GraphQL::Dataloader for dataloader.with(...) to work.
let(:schema) do
build_schema do
use GraphQL::Dataloader
query(Types::Query)
end
end

let(:user1) { User.new(id: 1, name: "User #1") }
let(:user2) { User.new(id: 2, name: "User #2") }

let!(:post1) { Post.create(id: 1, title: "object test 1", author: user1) }
let!(:post2) { Post.create(id: 2, title: "object test 2", author: user2) }

let(:memory_store) { GraphQL::FragmentCache::MemoryStore.new }

before do
# Spy on the batch loader so we can assert it runs exactly once (no N+1).
allow(User).to receive(:find_by_post_ids).and_call_original

# warmup cache
execute_query

# make objects dirty
user1.name = "User #1 new"
user2.name = "User #2 new"
end

it "returns cached results" do
# Old names prove the values come from the cache, not the dirty objects.
expect(execute_query.dig("data", "posts")).to eq([
{
"id" => "1",
"dataloaderCachedAuthor" => {"name" => "User #1"}
},
{
"id" => "2",
"dataloaderCachedAuthor" => {"name" => "User #2"}
}
])

# One batched call for both posts across both executions = cache hit + batching.
expect(User).to have_received(:find_by_post_ids).with([post1.id, post2.id]).once
end
end

describe "conditional caching" do
let(:schema) do
field_resolver = resolver
Expand Down
20 changes: 10 additions & 10 deletions spec/graphql/fragment_cache/schema/lazy_cache_resolver_spec.rb
Original file line number Diff line number Diff line change
Expand Up @@ -6,21 +6,27 @@
describe "#initialize" do
context "lazy cache resolver state management" do
let(:state_key) { :lazy_cache_resolver_statez }
let(:gql_context) { instance_double "Context" }
let(:fragment) { GraphQL::FragmentCache::Fragment.new(gql_context) }

before do
allow(gql_context).to receive(:namespace).and_return({})
end

it "adds lazy state property to the query context" do
context = {}

expect(context).not_to have_key(state_key)

GraphQL::FragmentCache::Schema::LazyCacheResolver.new(nil, context, {})
GraphQL::FragmentCache::Schema::LazyCacheResolver.new(fragment, context, {})

expect(context).to have_key(state_key)
end

it "has :pending_fragments Set in state" do
context = {}

GraphQL::FragmentCache::Schema::LazyCacheResolver.new({}, context, {})
GraphQL::FragmentCache::Schema::LazyCacheResolver.new(fragment, context, {})

expect(context[state_key]).to have_key(:pending_fragments)
expect(context[state_key][:pending_fragments]).to be_instance_of(Set)
Expand All @@ -29,7 +35,7 @@
it "has :resolved_fragments Hash in state" do
context = {}

GraphQL::FragmentCache::Schema::LazyCacheResolver.new({}, context, {})
GraphQL::FragmentCache::Schema::LazyCacheResolver.new(fragment, context, {})

expect(context[state_key]).to have_key(:resolved_fragments)
expect(context[state_key][:resolved_fragments]).to be_instance_of(Hash)
Expand All @@ -39,7 +45,7 @@
context = {}
fragments = []

3.times { fragments.push(Object.new) }
3.times { fragments.push(GraphQL::FragmentCache::Fragment.new(gql_context)) }

fragments.each do |f|
GraphQL::FragmentCache::Schema::LazyCacheResolver.new(f, context, {})
Expand All @@ -51,10 +57,4 @@
end
end
end

it "has :resolve method" do
lazy_cache_resolver = GraphQL::FragmentCache::Schema::LazyCacheResolver.new({}, {}, {})

expect(lazy_cache_resolver).to respond_to(:resolve)
end
end
6 changes: 6 additions & 0 deletions spec/support/models/user.rb
Original file line number Diff line number Diff line change
Expand Up @@ -4,6 +4,12 @@ class User
attr_reader :id
attr_accessor :name

class << self
# Batch entry point used by AuthorDataloaderSource: resolves each post id to
# that post's author, preserving the input order of `post_ids`.
def find_by_post_ids(post_ids)
post_ids.each_with_object([]) do |post_id, authors|
authors << Post.find(post_id).author
end
end
end

def initialize(id:, name:)
@id = id
@name = name
Expand Down
13 changes: 13 additions & 0 deletions spec/support/test_schema.rb
Original file line number Diff line number Diff line change
Expand Up @@ -7,6 +7,12 @@ def perform(posts)
end
end

# Dataloader source that batches author lookups: individual
# `dataloader.with(AuthorDataloaderSource).load(post_id)` calls are collected
# and fulfilled with a single `User.find_by_post_ids` call.
class AuthorDataloaderSource < GraphQL::Dataloader::Source
# post_ids: the accumulated keys from all pending load(...) calls.
def fetch(post_ids)
User.find_by_post_ids(post_ids)
end
end

module Types
class Base < GraphQL::Schema::Object
include GraphQL::FragmentCache::Object
Expand Down Expand Up @@ -41,6 +47,7 @@ class Post < Base
field :cached_author, User, null: false
field :batched_cached_author, User, null: false
field :cached_author_inside_batch, User, null: false
field :dataloader_cached_author, User, null: false

field :meta, String, null: true

Expand All @@ -60,6 +67,12 @@ def cached_author_inside_batch
cache_fragment(author, context: context)
end
end

# Field resolver exercising the dataloader-aware cache path. `dataloader: true`
# tells cache_fragment that the block runs inside a Dataloader Fiber, so on a
# cache miss it is resolved eagerly instead of during lazy resolution.
def dataloader_cached_author
cache_fragment(dataloader: true) do
dataloader.with(AuthorDataloaderSource).load(object.id)
end
end
end

class PostInput < GraphQL::Schema::InputObject
Expand Down

0 comments on commit 743b542

Please sign in to comment.