diff --git a/implementation_plan.md b/implementation_plan.md new file mode 100644 index 0000000..175ed64 --- /dev/null +++ b/implementation_plan.md @@ -0,0 +1,33 @@ +# Implementation Plan: Sandbox Adapter Parity + +## Goal +Make `Cache.Sandbox` behave as a drop-in replacement for Redis/ETS/DETS/ConCache adapters so all adapter-specific tests pass when `sandbox?` is enabled. + +## Discoveries (Audit) +- Redis JSON operations in sandbox return `:ok` instead of `{:ok, count}` for `json_delete/2`, `json_incr/3`, `json_clear/3`, `json_array_append/4`. +- Redis hash operations in sandbox return `:ok` for `hash_set/3`, `hash_delete/2`, and `hash_set_many/1` when adapter tests expect `{:ok, count}` or `{:ok, [counts]}` with TTL. +- ETS `info/0` in sandbox does not include `:name` which ETS tests require. +- ETS/DETS conversion APIs (`to_dets`, `from_dets`, `to_ets`, `from_ets`) return `{:error, :not_supported_in_sandbox}` but tests expect working conversions. +- ETS/DETS macros bypass sandbox because they call `:ets`/`:dets` directly even when `sandbox?` is enabled. +- ConCache-specific APIs (`get_or_store/3`, `dirty_get_or_store/2`) are missing from sandbox but tests call them. + +## Plan +1. Align Redis-style operations in `Cache.Sandbox`. + - Match return values and error tuples for hash and JSON operations. + - Ensure scan/hash_scan results match Redis adapter expectations. + +2. Make ETS/DETS APIs sandbox-aware. + - Update `Cache.ETS` and `Cache.DETS` macros to delegate to `Cache.Sandbox` when `sandbox?` is true. + - Implement missing sandbox equivalents for ETS/DETS conversion and file APIs required by tests. + - Ensure `info/0` returns the `:name` field for ETS. + +3. Implement ConCache API parity. + - Add `get_or_store/3` and `dirty_get_or_store/2` in `Cache.Sandbox` with matching semantics. + +4. Add sandbox parity tests. + - Mirror adapter-specific tests (Redis hash/JSON, ETS, DETS, ConCache) using sandbox-enabled caches. + - Keep expectations identical to adapter tests. + +## Verification +- Run adapter tests against sandbox-enabled modules. +- Confirm no warnings and that new parity tests pass. diff --git a/lib/cache/dets.ex b/lib/cache/dets.ex index d9d0a3d..40ac30a 100644 --- a/lib/cache/dets.ex +++ b/lib/cache/dets.ex @@ -30,6 +30,162 @@ defmodule Cache.DETS do defmacro __using__(_opts) do quote do + @doc """ + Returns a list of the names of all open DETS tables on this node. + """ + def all do + :dets.all() + end + + @doc """ + Returns a list of objects stored in the table (binary chunk format). + """ + def bchunk(continuation) do + :dets.bchunk(@cache_name, continuation) + end + + @doc """ + Closes the DETS table. + """ + def close do + :dets.close(@cache_name) + end + + @doc """ + Deletes all objects in the DETS table. + """ + def delete_all_objects do + :dets.delete_all_objects(@cache_name) + end + + @doc """ + Deletes the exact object from the DETS table. + """ + def delete_object(object) do + :dets.delete_object(@cache_name, object) + end + + @doc """ + Returns the first key in the table. + """ + def first do + :dets.first(@cache_name) + end + + @doc """ + Folds over all objects in the table. + """ + def foldl(function, acc) do + :dets.foldl(function, acc, @cache_name) + end + + @doc """ + Folds over all objects in the table (same as foldl for DETS). + """ + def foldr(function, acc) do + :dets.foldr(function, acc, @cache_name) + end + + @doc """ + Get information about the DETS table. 
+ """ + def info do + :dets.info(@cache_name) + end + + @doc """ + Get specific information about the DETS table. + """ + def info(item) do + :dets.info(@cache_name, item) + end + + @doc """ + Replaces the existing objects of the table with objects created by calling the input function. + """ + def init_table(init_fun) do + :dets.init_table(@cache_name, init_fun) + end + + @doc """ + Replaces the existing objects of the table with objects created by calling the input function with options. + """ + def init_table(init_fun, options) do + :dets.init_table(@cache_name, init_fun, options) + end + + @doc """ + Insert raw data into the DETS table using the underlying :dets.insert/2 function. + """ + def insert_raw(data) do + :dets.insert(@cache_name, data) + end + + @doc """ + Same as insert/2 except returns false if any object with the same key already exists. + """ + def insert_new(data) do + :dets.insert_new(@cache_name, data) + end + + @doc """ + Returns true if it would be possible to initialize the table with bchunk data. + """ + # credo:disable-for-next-line Credo.Check.Readability.PredicateFunctionNames + def is_compatible_bchunk_format(bchunk_format) do + :dets.is_compatible_bchunk_format(@cache_name, bchunk_format) + end + + @doc """ + Returns true if the file is a DETS table. + """ + # credo:disable-for-next-line Credo.Check.Readability.PredicateFunctionNames + def is_dets_file(filename) do + :dets.is_dets_file(filename) + end + + @doc """ + Returns a list of all objects with the given key. + """ + def lookup(key) do + :dets.lookup(@cache_name, key) + end + + @doc """ + Continues a match started with match/2. + """ + def match(continuation) when not is_tuple(continuation) do + :dets.match(continuation) + end + + @doc """ + Matches the objects in the table against the pattern. + """ + def match(pattern) do + :dets.match(@cache_name, pattern) + end + + @doc """ + Matches the objects in the table against the pattern with a limit. + """ + def match(pattern, limit) do + :dets.match(@cache_name, pattern, limit) + end + + @doc """ + Deletes all objects that match the pattern from the table. + """ + def match_delete(pattern) do + :dets.match_delete(@cache_name, pattern) + end + + @doc """ + Continues a match_object started with match_object/2. + """ + def match_object(continuation) when not is_tuple(continuation) do + :dets.match_object(continuation) + end + @doc """ Match objects in the DETS table that match the given pattern. @@ -66,6 +222,55 @@ defmodule Cache.DETS do :dets.member(@cache_name, key) end + @doc """ + Returns the next key following the given key. + """ + def next(key) do + :dets.next(@cache_name, key) + end + + @doc """ + Opens an existing DETS table file. + """ + def open_file(filename) do + :dets.open_file(filename) + end + + @doc """ + Opens a DETS table with the given name and arguments. + """ + def open_file(name, args) do + :dets.open_file(name, args) + end + + @doc """ + Returns the table name given the pid of a process that handles requests to a table. + """ + def pid2name(pid) do + :dets.pid2name(pid) + end + + @doc """ + Restores an opaque continuation that has passed through external term format. + """ + def repair_continuation(continuation, match_spec) do + :dets.repair_continuation(continuation, match_spec) + end + + @doc """ + Fixes the table for safe traversal. + """ + def safe_fixtable(fix) do + :dets.safe_fixtable(@cache_name, fix) + end + + @doc """ + Continues a select started with select/2. 
+ """ + def select(continuation) when not is_list(continuation) do + :dets.select(continuation) + end + @doc """ Select objects from the DETS table using a match specification. @@ -91,103 +296,66 @@ defmodule Cache.DETS do end @doc """ - Get information about the DETS table. - - ## Examples - - iex> #{inspect(__MODULE__)}.info() - [...] + Delete objects from the DETS table using a match specification. """ - def info do - :dets.info(@cache_name) + def select_delete(match_spec) do + :dets.select_delete(@cache_name, match_spec) end @doc """ - Get specific information about the DETS table. - - ## Examples - - iex> #{inspect(__MODULE__)}.info(:size) - 42 + Returns the list of objects associated with slot I. """ - def info(item) do - :dets.info(@cache_name, item) + def slot(i) do + :dets.slot(@cache_name, i) end @doc """ - Delete objects from the DETS table using a match specification. - - ## Examples - - iex> #{inspect(__MODULE__)}.select_delete([{{:key, :_}, [], [true]}]) - 42 + Ensures that all updates made to the table are written to disk. """ - def select_delete(match_spec) do - :dets.select_delete(@cache_name, match_spec) + def sync do + :dets.sync(@cache_name) end @doc """ - Delete objects from the DETS table that match the given pattern. + Returns a QLC query handle for the table. + """ + def table do + :dets.table(@cache_name) + end - ## Examples + @doc """ + Returns a QLC query handle for the table with options. + """ + def table(options) do + :dets.table(@cache_name, options) + end - iex> #{inspect(__MODULE__)}.match_delete({:key, :_}) - :ok + @doc """ + Applies a function to each object stored in the table. """ - def match_delete(pattern) do - :dets.match_delete(@cache_name, pattern) + def traverse(fun) do + :dets.traverse(@cache_name, fun) end @doc """ Update a counter in the DETS table. - - ## Examples - - iex> #{inspect(__MODULE__)}.update_counter(:counter_key, {2, 1}) - 43 """ def update_counter(key, update_op) do :dets.update_counter(@cache_name, key, update_op) end @doc """ - Insert raw data into the DETS table using the underlying :dets.insert/2 function. - - ## Examples - - iex> #{inspect(__MODULE__)}.insert_raw({:key, "value"}) - :ok - iex> #{inspect(__MODULE__)}.insert_raw([{:key1, "value1"}, {:key2, "value2"}]) - :ok + Convert a DETS table to the given ETS table. """ - def insert_raw(data) do - :dets.insert(@cache_name, data) + def to_ets(ets_table) do + :dets.to_ets(@cache_name, ets_table) end - if function_exported?(:dets, :to_ets, 1) do - @doc """ - Convert a DETS table to an ETS table. - - ## Examples - - iex> #{inspect(__MODULE__)}.to_ets() - :my_ets_table - """ - def to_ets do - :dets.to_ets(@cache_name) - end - - @doc """ - Convert an ETS table to a DETS table. - - ## Examples - - iex> #{inspect(__MODULE__)}.from_ets(:my_ets_table) - :ok - """ - def from_ets(ets_table) do - :dets.from_ets(@cache_name, ets_table) - end + @doc """ + Convert an ETS table to a DETS table. 
+ """ + def from_ets(ets_table) do + :dets.from_ets(@cache_name, ets_table) end end end diff --git a/lib/cache/ets.ex b/lib/cache/ets.ex index ca3cdae..ad36a38 100644 --- a/lib/cache/ets.ex +++ b/lib/cache/ets.ex @@ -1,4 +1,20 @@ defmodule Cache.ETS do + require Logger + require Cache.OTPVersion + + @exit_signals [ + :sigabrt, + :sigalrm, + :sigchld, + :sighup, + :sigquit, + :sigstop, + :sigterm, + :sigtstp, + :sigusr1, + :sigusr2 + ] + @opts_definition [ write_concurrency: [ type: :boolean, @@ -20,6 +36,10 @@ defmodule Cache.ETS do compressed: [ type: :boolean, doc: "Enable ets compression" + ], + rehydration_path: [ + type: :string, + doc: "Path to store the ETS table on exit and rehydrate on startup" ] ] @@ -60,13 +80,206 @@ defmodule Cache.ETS do defmacro __using__(_opts) do quote do + require Cache.OTPVersion + @doc """ - Match objects in the ETS table that match the given pattern. + Returns a list of all ETS tables at the node. + """ + def all do + :ets.all() + end + + @doc """ + Deletes the entire ETS table. + """ + def delete_table do + :ets.delete(@cache_name) + end + + @doc """ + Deletes all objects in the ETS table. + """ + def delete_all_objects do + :ets.delete_all_objects(@cache_name) + end + + @doc """ + Deletes the exact object from the ETS table. + """ + def delete_object(object) do + :ets.delete_object(@cache_name, object) + end - ## Examples + @doc """ + Reads a file produced by tab2file/1,2 and creates the corresponding table. + """ + def file2tab(filename) do + :ets.file2tab(filename) + end + + @doc """ + Reads a file produced by tab2file/1,2 and creates the corresponding table with options. + """ + def file2tab(filename, options) do + :ets.file2tab(filename, options) + end - iex> #{inspect(__MODULE__)}.match_object({:_, :_}) - [...] + @doc """ + Returns the first key in the table. + """ + def first do + :ets.first(@cache_name) + end + + if Cache.OTPVersion.otp_release_at_least?(26) do + @doc """ + Returns the first key and object(s) in the table. (OTP 26+) + """ + def first_lookup do + :ets.first_lookup(@cache_name) + end + end + + @doc """ + Folds over all objects in the table from first to last. + """ + def foldl(function, acc) do + :ets.foldl(function, acc, @cache_name) + end + + @doc """ + Folds over all objects in the table from last to first. + """ + def foldr(function, acc) do + :ets.foldr(function, acc, @cache_name) + end + + @doc """ + Makes process pid the new owner of the table. + """ + def give_away(pid, gift_data) do + :ets.give_away(@cache_name, pid, gift_data) + end + + @doc """ + Get information about the ETS table. + """ + def info do + :ets.info(@cache_name) + end + + @doc """ + Get specific information about the ETS table. + """ + def info(item) do + :ets.info(@cache_name, item) + end + + @doc """ + Replaces the existing objects of the table with objects created by calling the input function. + """ + def init_table(init_fun) do + :ets.init_table(@cache_name, init_fun) + end + + @doc """ + Insert raw data into the ETS table using the underlying :ets.insert/2 function. + """ + def insert_raw(data) do + :ets.insert(@cache_name, data) + end + + @doc """ + Same as insert/2 except returns false if any object with the same key already exists. + """ + def insert_new(data) do + :ets.insert_new(@cache_name, data) + end + + @doc """ + Checks if a term represents a valid compiled match specification. 
+ """ + # credo:disable-for-next-line Credo.Check.Readability.PredicateFunctionNames + def is_compiled_ms(term) do + :ets.is_compiled_ms(term) + end + + @doc """ + Returns the last key in the table (for ordered_set, otherwise same as first). + """ + def last do + :ets.last(@cache_name) + end + + if Cache.OTPVersion.otp_release_at_least?(26) do + @doc """ + Returns the last key and object(s) in the table. (OTP 26+) + """ + def last_lookup do + :ets.last_lookup(@cache_name) + end + end + + @doc """ + Returns a list of all objects with the given key. + """ + def lookup(key) do + :ets.lookup(@cache_name, key) + end + + @doc """ + Returns the Pos:th element of the object with the given key. + """ + def lookup_element(key, pos) do + :ets.lookup_element(@cache_name, key, pos) + end + + if Cache.OTPVersion.otp_release_at_least?(26) do + @doc """ + Returns the Pos:th element of the object with the given key, or default if not found. (OTP 26+) + """ + def lookup_element(key, pos, default) do + :ets.lookup_element(@cache_name, key, pos, default) + end + end + + @doc """ + Continues a match started with match/2. + """ + def match(continuation) do + :ets.match(continuation) + end + + @doc """ + Matches the objects in the table against the pattern. + """ + def match_pattern(pattern) do + :ets.match(@cache_name, pattern) + end + + @doc """ + Matches the objects in the table against the pattern with a limit. + """ + def match_pattern(pattern, limit) do + :ets.match(@cache_name, pattern, limit) + end + + @doc """ + Deletes all objects that match the pattern from the table. + """ + def match_delete(pattern) do + :ets.match_delete(@cache_name, pattern) + end + + @doc """ + Continues a match_object started with match_object/2. + """ + def match_object(continuation) when not is_tuple(continuation) do + :ets.match_object(continuation) + end + + @doc """ + Match objects in the ETS table that match the given pattern. """ def match_object(pattern) do :ets.match_object(@cache_name, pattern) @@ -74,35 +287,94 @@ defmodule Cache.ETS do @doc """ Match objects in the ETS table that match the given pattern with limit. - - ## Examples - - iex> #{inspect(__MODULE__)}.match_object({:_, :_}, 10) - {[...], continuation} """ def match_object(pattern, limit) do :ets.match_object(@cache_name, pattern, limit) end @doc """ - Check if a key is a member of the ETS table. + Transforms a match specification into an internal representation. + """ + def match_spec_compile(match_spec) do + :ets.match_spec_compile(match_spec) + end - ## Examples + @doc """ + Executes the matching specified in a compiled match specification on a list of terms. + """ + def match_spec_run(list, compiled_match_spec) do + :ets.match_spec_run(list, compiled_match_spec) + end - iex> #{inspect(__MODULE__)}.member(:key) - true + @doc """ + Check if a key is a member of the ETS table. """ def member(key) do :ets.member(@cache_name, key) end @doc """ - Select objects from the ETS table using a match specification. + Returns the next key following the given key. + """ + def next(key) do + :ets.next(@cache_name, key) + end + + if Cache.OTPVersion.otp_release_at_least?(26) do + @doc """ + Returns the next key and object(s) following the given key. (OTP 26+) + """ + def next_lookup(key) do + :ets.next_lookup(@cache_name, key) + end + end + + @doc """ + Returns the previous key preceding the given key (for ordered_set). 
+ """ + def prev(key) do + :ets.prev(@cache_name, key) + end + + if Cache.OTPVersion.otp_release_at_least?(26) do + @doc """ + Returns the previous key and object(s) preceding the given key. (OTP 26+) + """ + def prev_lookup(key) do + :ets.prev_lookup(@cache_name, key) + end + end - ## Examples + @doc """ + Renames the table to the new name. + """ + def rename(name) do + :ets.rename(@cache_name, name) + end - iex> #{inspect(__MODULE__)}.select([{{:key, :_}, [], [:'$_']}]) - [...] + @doc """ + Restores an opaque continuation that has passed through external term format. + """ + def repair_continuation(continuation, match_spec) do + :ets.repair_continuation(continuation, match_spec) + end + + @doc """ + Fixes the table for safe traversal. + """ + def safe_fixtable(fix) do + :ets.safe_fixtable(@cache_name, fix) + end + + @doc """ + Continues a select started with select/2. + """ + def select(continuation) when not is_list(continuation) do + :ets.select(continuation) + end + + @doc """ + Select objects from the ETS table using a match specification. """ def select(match_spec) do :ets.select(@cache_name, match_spec) @@ -110,114 +382,172 @@ defmodule Cache.ETS do @doc """ Select objects from the ETS table using a match specification with limit. - - ## Examples - - iex> #{inspect(__MODULE__)}.select([{{:key, :_}, [], [:'$_']}], 10) - {[...], continuation} """ def select(match_spec, limit) do :ets.select(@cache_name, match_spec, limit) end @doc """ - Get information about the ETS table. + Counts the objects matching the match specification. + """ + def select_count(match_spec) do + :ets.select_count(@cache_name, match_spec) + end - ## Examples + @doc """ + Delete objects from the ETS table using a match specification. + """ + def select_delete(match_spec) do + :ets.select_delete(@cache_name, match_spec) + end - iex> #{inspect(__MODULE__)}.info() - [...] + @doc """ + Replaces objects matching the match specification with the match specification result. """ - def info do - :ets.info(@cache_name) + def select_replace(match_spec) do + :ets.select_replace(@cache_name, match_spec) end @doc """ - Get specific information about the ETS table. + Continues a select_reverse started with select_reverse/2. + """ + def select_reverse(continuation) when not is_list(continuation) do + :ets.select_reverse(continuation) + end - ## Examples + @doc """ + Like select/1 but returns the list in reverse order for ordered_set. + """ + def select_reverse(match_spec) do + :ets.select_reverse(@cache_name, match_spec) + end - iex> #{inspect(__MODULE__)}.info(:size) - 42 + @doc """ + Like select/2 but traverses in reverse order for ordered_set. """ - def info(item) do - :ets.info(@cache_name, item) + def select_reverse(match_spec, limit) do + :ets.select_reverse(@cache_name, match_spec, limit) end @doc """ - Delete objects from the ETS table using a match specification. + Sets table options (only heir is allowed after creation). + """ + def setopts(opts) do + :ets.setopts(@cache_name, opts) + end - ## Examples + @doc """ + Returns the list of objects associated with slot I. + """ + def slot(i) do + :ets.slot(@cache_name, i) + end - iex> #{inspect(__MODULE__)}.select_delete([{{:key, :_}, [], [true]}]) - 42 + @doc """ + Dumps the table to a file. """ - def select_delete(match_spec) do - :ets.select_delete(@cache_name, match_spec) + def tab2file(filename) do + :ets.tab2file(@cache_name, filename) end @doc """ - Delete objects from the ETS table that match the given pattern. + Dumps the table to a file with options. 
+ """ + def tab2file(filename, options) do + :ets.tab2file(@cache_name, filename, options) + end - ## Examples + @doc """ + Returns a list of all objects in the table. + """ + def tab2list do + :ets.tab2list(@cache_name) + end - iex> #{inspect(__MODULE__)}.match_delete({:key, :_}) - true + @doc """ + Returns information about the table dumped to file. """ - def match_delete(pattern) do - :ets.match_delete(@cache_name, pattern) + def tabfile_info(filename) do + :ets.tabfile_info(filename) end @doc """ - Update a counter in the ETS table. + Returns a QLC query handle for the table. + """ + def table do + :ets.table(@cache_name) + end - ## Examples + @doc """ + Returns a QLC query handle for the table with options. + """ + def table(options) do + :ets.table(@cache_name, options) + end - iex> #{inspect(__MODULE__)}.update_counter(:counter_key, {2, 1}) - 43 + @doc """ + Returns and removes all objects with the given key. + """ + def take(key) do + :ets.take(@cache_name, key) + end + + @doc """ + Tests a match specification against a tuple. + """ + def test_ms(tuple, match_spec) do + :ets.test_ms(tuple, match_spec) + end + + @doc """ + Update a counter in the ETS table. """ def update_counter(key, update_op) do :ets.update_counter(@cache_name, key, update_op) end @doc """ - Insert raw data into the ETS table using the underlying :ets.insert/2 function. - - ## Examples + Update a counter in the ETS table with a default value. + """ + def update_counter(key, update_op, default) do + :ets.update_counter(@cache_name, key, update_op, default) + end - iex> #{inspect(__MODULE__)}.insert_raw({:key, "value"}) - true - iex> #{inspect(__MODULE__)}.insert_raw([{:key1, "value1"}, {:key2, "value2"}]) - true + @doc """ + Updates specific elements of an object. """ - def insert_raw(data) do - :ets.insert(@cache_name, data) + def update_element(key, element_spec) do + :ets.update_element(@cache_name, key, element_spec) end - if function_exported?(:ets, :to_dets, 1) do + if Cache.OTPVersion.otp_release_at_least?(26) do @doc """ - Convert an ETS table to a DETS table. - - ## Examples - - iex> #{inspect(__MODULE__)}.to_dets(:my_dets_table) - :ok + Updates specific elements of an object with a default. (OTP 26+) """ - def to_dets(dets_table) do - :ets.to_dets(@cache_name, dets_table) + def update_element(key, element_spec, default) do + :ets.update_element(@cache_name, key, element_spec, default) end + end - @doc """ - Convert a DETS table to an ETS table. + @doc """ + Returns the tid of this named table. + """ + def whereis do + :ets.whereis(@cache_name) + end - ## Examples + @doc """ + Convert an ETS table to a DETS table. + """ + def to_dets(dets_table) do + :ets.to_dets(@cache_name, dets_table) + end - iex> #{inspect(__MODULE__)}.from_dets(:my_dets_table) - :ok - """ - def from_dets(dets_table) do - :ets.from_dets(@cache_name, dets_table) - end + @doc """ + Convert a DETS table to an ETS table. 
+ """ + def from_dets(dets_table) do + :ets.from_dets(@cache_name, dets_table) end end end @@ -229,25 +559,78 @@ defmodule Cache.ETS do def start_link(opts) do Task.start_link(fn -> table_name = opts[:table_name] + rehydration_path = opts[:rehydration_path] - opts = + ets_opts = opts - |> Keyword.drop([:table_name, :type]) + |> Keyword.drop([:table_name, :type, :rehydration_path]) |> Kernel.++([opts[:type], :public, :named_table]) - opts = + ets_opts = if opts[:compressed] do - Keyword.delete(opts, :compressed) ++ [:compressed] + Keyword.delete(ets_opts, :compressed) ++ [:compressed] else - opts + ets_opts end - _ = :ets.new(table_name, opts) + rehydrate_or_create_table(table_name, rehydration_path, ets_opts) + + if rehydration_path do + setup_exit_signal_handlers(table_name, rehydration_path) + end Process.hibernate(Function, :identity, [nil]) end) end + defp rehydrate_or_create_table(table_name, rehydration_path, ets_opts) + when is_binary(rehydration_path) do + file_path = build_file_path(table_name, rehydration_path) + + if File.exists?(file_path) do + case :ets.file2tab(String.to_charlist(file_path)) do + {:ok, ^table_name} -> + Logger.info("[Cache.ETS] Rehydrated #{table_name} from #{file_path}") + :ok + + {:ok, _other_name} -> + Logger.warning("[Cache.ETS] File #{file_path} has different table name, creating new table #{table_name}") + :ets.new(table_name, ets_opts) + + {:error, reason} -> + Logger.warning("[Cache.ETS] Failed to rehydrate #{table_name} from #{file_path}: #{inspect(reason)}, creating new table") + :ets.new(table_name, ets_opts) + end + else + :ets.new(table_name, ets_opts) + end + end + + defp rehydrate_or_create_table(table_name, _rehydration_path, ets_opts) do + :ets.new(table_name, ets_opts) + end + + defp setup_exit_signal_handlers(table_name, rehydration_path) do + file_path = build_file_path(table_name, rehydration_path) + + Enum.each(@exit_signals, fn signal -> + case System.trap_signal(signal, fn -> + :ets.tab2file(table_name, String.to_charlist(file_path)) + :ok + end) do + {:ok, _ref} -> :ok + {:error, reason} -> + Logger.error("[Cache.ETS] Failed to setup exit signal handler for #{table_name}: #{inspect(reason)}") + + :ok + end + end) + end + + defp build_file_path(table_name, rehydration_path) do + Path.join(rehydration_path, "#{table_name}.ets") + end + @impl Cache def child_spec({cache_name, opts}) do %{ @@ -257,6 +640,7 @@ defmodule Cache.ETS do end @impl Cache + @spec get(atom, atom | String.t(), Keyword.t()) :: ErrorMessage.t_res(any) def get(cache_name, key, _opts \\ []) do case :ets.lookup(cache_name, key) do [{^key, value}] -> {:ok, value} @@ -264,19 +648,27 @@ defmodule Cache.ETS do [value] -> {:ok, value} [] -> {:ok, nil} end + rescue + e -> {:error, ErrorMessage.internal_server_error(Exception.message(e), %{cache: cache_name, key: key})} end @impl Cache + @spec put(atom, atom | String.t(), pos_integer | nil, any, Keyword.t()) :: :ok | ErrorMessage.t() def put(cache_name, key, _ttl \\ nil, value, _opts \\ []) do :ets.insert(cache_name, {key, value}) :ok + rescue + e -> {:error, ErrorMessage.internal_server_error(Exception.message(e), %{cache: cache_name, key: key})} end @impl Cache + @spec delete(atom, atom | String.t(), Keyword.t()) :: :ok | ErrorMessage.t() def delete(cache_name, key, _opts \\ []) do :ets.delete(cache_name, key) :ok + rescue + e -> {:error, ErrorMessage.internal_server_error(Exception.message(e), %{cache: cache_name, key: key})} end end diff --git a/lib/cache/otp_version.ex b/lib/cache/otp_version.ex new file mode 100644 index 
0000000..489a97a --- /dev/null +++ b/lib/cache/otp_version.ex @@ -0,0 +1,15 @@ +defmodule Cache.OTPVersion do + @moduledoc false + + @otp_release String.to_integer(to_string(:erlang.system_info(:otp_release))) + + def otp_release, do: @otp_release + + defmacro otp_release_at_least?(version) do + otp_release = @otp_release + + quote do + unquote(otp_release) >= unquote(version) + end + end +end diff --git a/lib/cache/sandbox.ex b/lib/cache/sandbox.ex index 9030a04..a9e771a 100644 --- a/lib/cache/sandbox.ex +++ b/lib/cache/sandbox.ex @@ -80,15 +80,68 @@ defmodule Cache.Sandbox do end) end + def get_or_store(cache_name, key, _ttl, store_fun) do + Agent.get_and_update(cache_name, fn state -> + case Map.fetch(state, key) do + {:ok, value} -> + {value, state} + + :error -> + value = store_fun.() + {value, Map.put(state, key, value)} + end + end) + end + + def dirty_get_or_store(cache_name, key, store_fun) do + Agent.get_and_update(cache_name, fn state -> + case Map.fetch(state, key) do + {:ok, value} -> + {value, state} + + :error -> + value = store_fun.() + {value, Map.put(state, key, value)} + end + end) + end + def hash_delete(cache_name, key, hash_key, _opts) do - Agent.update(cache_name, fn state -> - Map.update(state, key, %{}, &Map.delete(&1, hash_key)) + Agent.get_and_update(cache_name, fn state -> + case Map.get(state, key) do + nil -> + {{:ok, 0}, state} + + value when is_map(value) -> + if Map.has_key?(value, hash_key) do + updated = Map.delete(value, hash_key) + + new_state = + if Enum.empty?(updated) do + Map.delete(state, key) + else + Map.put(state, key, updated) + end + + {{:ok, 1}, new_state} + else + {{:ok, 0}, state} + end + + _ -> + {{:ok, 0}, state} + end end) end def hash_get(cache_name, key, hash_key, _opts) do Agent.get(cache_name, fn state -> - {:ok, state[key][hash_key]} + value = + state + |> Map.get(key, %{}) + |> Map.get(hash_key) + + {:ok, value} end) end @@ -104,9 +157,9 @@ defmodule Cache.Sandbox do def hash_get_many(cache_name, keys_fields, _opts) do Agent.get(cache_name, fn state -> values = - Enum.reduce(keys_fields, [], fn {key, fields}, acc -> - values = Enum.map(fields, &state[key][&1]) - acc ++ [values] + Enum.map(keys_fields, fn {key, fields} -> + hash = Map.get(state, key, %{}) + Enum.map(fields, &Map.get(hash, &1)) end) {:ok, values} @@ -120,32 +173,49 @@ defmodule Cache.Sandbox do end def hash_set(cache_name, key, field, value, ttl, _opts) do - Agent.update(cache_name, fn state -> - put_hash_field_values(state, key, [{field, value}]) - end) + count = + Agent.get_and_update(cache_name, fn state -> + hash = Map.get(state, key, %{}) + is_new_field = not Map.has_key?(hash, field) + updated_hash = Map.put(hash, field, value) + + {if(is_new_field, do: 1, else: 0), Map.put(state, key, updated_hash)} + end) if ttl do - {:ok, [1, 1]} + {:ok, [count, 1]} else - :ok + {:ok, count} end end def hash_set_many(cache_name, keys_fields_values, ttl, _opts) do - Agent.update(cache_name, fn state -> - Enum.reduce(keys_fields_values, state, fn {key, fields_values}, acc -> - put_hash_field_values(acc, key, fields_values) + counts = + Agent.get_and_update(cache_name, fn state -> + {counts, new_state} = + Enum.map_reduce(keys_fields_values, state, fn {key, fields_values}, acc -> + hash = Map.get(acc, key, %{}) + + {updated_hash, count} = + Enum.reduce(fields_values, {hash, 0}, fn {field, value}, {hash_acc, count_acc} -> + is_new_field = not Map.has_key?(hash_acc, field) + updated_hash = Map.put(hash_acc, field, value) + new_count = if is_new_field, do: count_acc + 1, else: 
count_acc + + {updated_hash, new_count} + end) + + {count, Map.put(acc, key, updated_hash)} + end) + + {counts, new_state} end) - end) if ttl do - command_resps = - Enum.map(keys_fields_values, fn {_, fields_values} -> enum_length(fields_values) end) - - expiry_resps = Enum.map(keys_fields_values, fn _ -> 1 end) - {:ok, command_resps ++ expiry_resps} + expiry_resps = List.duplicate(1, enum_length(keys_fields_values)) + {:ok, counts ++ expiry_resps} else - :ok + {:ok, counts} end end @@ -185,44 +255,84 @@ defmodule Cache.Sandbox do end def json_incr(cache_name, key, path, incr \\ 1, _opts) do - Agent.update(cache_name, fn state -> - Map.update(state, key, nil, fn value -> - update_in(value, String.split(path), &(&1 + incr)) - end) + path_parts = json_path_parts(path) + path_string = json_path_string(path) + + Agent.get_and_update(cache_name, fn state -> + case get_in(state, [key | path_parts]) do + nil -> + {{:error, ErrorMessage.not_found("ERR Path '$.#{path_string}' does not exist")}, state} + + value -> + new_value = value + incr + {{:ok, new_value}, put_in(state, [key | path_parts], new_value)} + end end) end def json_clear(cache_name, key, path, _opts) do - Agent.update(cache_name, fn state -> - Map.update( - state, - key, - nil, - &update_in(&1, String.split(path, "."), fn - integer when is_integer(integer) -> 0 - list when is_list(list) -> [] - map when is_map(map) -> %{} - _ -> nil - end) - ) + path_parts = json_path_parts(path) + + Agent.get_and_update(cache_name, fn state -> + case get_in(state, [key | path_parts]) do + nil -> + {{:ok, 0}, state} + + value -> + updated_value = + case value do + integer when is_integer(integer) -> 0 + list when is_list(list) -> [] + map when is_map(map) -> %{} + _ -> nil + end + + {{:ok, 1}, put_in(state, [key | path_parts], updated_value)} + end end) end def json_delete(cache_name, key, path, _opts) do - Agent.update(cache_name, fn state -> - Map.update(state, key, nil, fn value -> - {_, state} = pop_in(value, String.split(path, ".")) + path_parts = json_path_parts(path) - state - end) + Agent.get_and_update(cache_name, fn state -> + case get_in(state, [key | path_parts]) do + nil -> + {{:ok, 0}, state} + + _value -> + {_, updated} = pop_in(state, [key | path_parts]) + {{:ok, 1}, updated} + end end) end + def json_array_append(cache_name, key, path, values, _opts) when is_list(values) do + append_json_array(cache_name, key, path, values, &stringify_value/1) + end + def json_array_append(cache_name, key, path, value, _opts) do - Agent.update(cache_name, fn state -> - Map.update(state, key, nil, fn state_value -> - update_in(state_value, String.split(path, "."), &(&1 ++ [value])) - end) + append_json_array(cache_name, key, path, [value], &stringify_value/1) + end + + defp append_json_array(cache_name, key, path, values, value_transformer) do + path_parts = json_path_parts(path) + path_string = json_path_string(path) + updated_values = Enum.map(values, value_transformer) + + Agent.get_and_update(cache_name, fn state -> + case get_in(state, [key | path_parts]) do + nil -> + {{:error, ErrorMessage.not_found("ERR Path '$.#{path_string}' does not exist")}, state} + + list when is_list(list) -> + updated_list = list ++ updated_values + new_state = put_in(state, [key | path_parts], updated_list) + {{:ok, enum_length(updated_list)}, new_state} + + _ -> + {{:error, ErrorMessage.not_found("ERR Path '$.#{path_string}' does not exist")}, state} + end end) end @@ -243,29 +353,212 @@ defmodule Cache.Sandbox do raise "Not Implemented" end - def scan(cache_name, 
_scan_opts, _opts) do + def scan(cache_name, scan_opts, _opts) do + match = scan_opts[:match] || "*" + count = scan_opts[:count] + type = scan_opts[:type] + Agent.get(cache_name, fn state -> - {:ok, Map.keys(state)} + values = + state + |> Enum.filter(fn {_key, value} -> scan_type_match?(value, type) end) + |> Enum.map(fn {key, _value} -> {key, scan_key(key)} end) + |> Enum.filter(fn {_key, match_key} -> scan_match?(match_key, match) end) + |> Enum.map(fn {_key, match_key} -> match_key end) + |> apply_scan_count(count) + + {:ok, values} end) end - def hash_scan(cache_name, key, _scan_opts, _opts) do + def hash_scan(cache_name, key, scan_opts, _opts) do + match = scan_opts[:match] || "*" + count = scan_opts[:count] + Agent.get(cache_name, fn state -> case Map.get(state, key) do - nil -> {:ok, []} - map when is_map(map) -> {:ok, Map.keys(map)} - _ -> {:ok, []} + map when is_map(map) -> + elements = + map + |> Enum.filter(fn {field, _value} -> scan_match?(field, match) end) + |> apply_scan_count(count) + + {:ok, elements} + + _ -> + {:ok, []} end end) end # ETS & DETS Compatibility + + def all do + Agent.get(Cache.Sandbox, fn state -> Map.keys(state) end) + rescue + _ -> [] + end + + def delete_table(cache_name) do + Agent.update(cache_name, fn _ -> %{} end) + true + end + + def delete_all_objects(cache_name) do + Agent.update(cache_name, fn _ -> %{} end) + true + end + + def delete_object(cache_name, object) when is_tuple(object) do + key = elem(object, 0) + + Agent.update(cache_name, fn state -> + case Map.get(state, key) do + ^object -> Map.delete(state, key) + _ -> state + end + end) + + true + end + + def first(cache_name) do + Agent.get(cache_name, fn state -> + case Map.keys(state) do + [] -> :"$end_of_table" + [key | _] -> key + end + end) + end + + def first_lookup(cache_name) do + Agent.get(cache_name, fn state -> + case Map.to_list(state) do + [] -> :"$end_of_table" + [{key, value} | _] -> {key, [{key, value}]} + end + end) + end + + def last(cache_name) do + Agent.get(cache_name, fn state -> + case state |> Map.keys() |> Enum.reverse() do + [] -> :"$end_of_table" + [key | _] -> key + end + end) + end + + def last_lookup(cache_name) do + Agent.get(cache_name, fn state -> + case state |> Map.to_list() |> Enum.reverse() do + [] -> :"$end_of_table" + [{key, value} | _] -> {key, [{key, value}]} + end + end) + end + + def next(cache_name, key) do + Agent.get(cache_name, fn state -> + keys = state |> Map.keys() |> Enum.sort() + find_next_key(keys, key) + end) + end + + def next_lookup(cache_name, key) do + Agent.get(cache_name, fn state -> + keys = state |> Map.keys() |> Enum.sort() + + case find_next_key(keys, key) do + :"$end_of_table" -> :"$end_of_table" + next_key -> {next_key, [{next_key, Map.get(state, next_key)}]} + end + end) + end + + def prev(cache_name, key) do + Agent.get(cache_name, fn state -> + keys = state |> Map.keys() |> Enum.sort() |> Enum.reverse() + find_next_key(keys, key) + end) + end + + def prev_lookup(cache_name, key) do + Agent.get(cache_name, fn state -> + keys = state |> Map.keys() |> Enum.sort() |> Enum.reverse() + + case find_next_key(keys, key) do + :"$end_of_table" -> :"$end_of_table" + prev_key -> {prev_key, [{prev_key, Map.get(state, prev_key)}]} + end + end) + end + + defp find_next_key([], _key), do: :"$end_of_table" + defp find_next_key([_], _key), do: :"$end_of_table" + + defp find_next_key([current, next | _rest], key) when current === key do + next + end + + defp find_next_key([_ | rest], key), do: find_next_key(rest, key) + + def 
foldl(cache_name, function, acc) do + Agent.get(cache_name, fn state -> + state + |> Map.to_list() + |> Enum.reduce(acc, fn {key, value}, acc_inner -> + function.({key, value}, acc_inner) + end) + end) + end + + def foldr(cache_name, function, acc) do + Agent.get(cache_name, fn state -> + state + |> Map.to_list() + |> Enum.reverse() + |> Enum.reduce(acc, fn {key, value}, acc_inner -> + function.({key, value}, acc_inner) + end) + end) + end + def member(cache_name, key) do Agent.get(cache_name, fn state -> Map.has_key?(state, key) end) end + def lookup(cache_name, key) do + Agent.get(cache_name, fn state -> + case Map.get(state, key) do + nil -> [] + value -> [{key, value}] + end + end) + end + + def lookup_element(cache_name, key, pos) do + Agent.get(cache_name, fn state -> + case Map.get(state, key) do + nil -> raise ArgumentError, "key not found" + value when is_tuple(value) -> elem(value, pos - 1) + value -> value + end + end) + end + + def lookup_element(cache_name, key, pos, default) do + Agent.get(cache_name, fn state -> + case Map.get(state, key) do + nil -> default + value when is_tuple(value) -> elem(value, pos - 1) + value -> value + end + end) + end + def update_counter(cache_name, key, {_pos, incr}) do Agent.get_and_update(cache_name, fn state -> current_value = state[key] || 0 @@ -274,8 +567,33 @@ defmodule Cache.Sandbox do end) end + def update_counter(cache_name, key, incr) when is_integer(incr) do + Agent.get_and_update(cache_name, fn state -> + current_value = state[key] || 0 + new_value = current_value + incr + {new_value, Map.put(state, key, new_value)} + end) + end + + def update_counter(cache_name, key, update_op, default) do + Agent.get_and_update(cache_name, fn state -> + if Map.has_key?(state, key) do + current_value = state[key] + incr = if is_tuple(update_op), do: elem(update_op, 1), else: update_op + new_value = current_value + incr + {new_value, Map.put(state, key, new_value)} + else + default_value = if is_tuple(default), do: elem(default, 1), else: default + incr = if is_tuple(update_op), do: elem(update_op, 1), else: update_op + new_value = default_value + incr + {new_value, Map.put(state, key, new_value)} + end + end) + end + def insert_raw(cache_name, data) when is_tuple(data) do - {key, value} = data + key = elem(data, 0) + value = if tuple_size(data) === 2, do: elem(data, 1), else: data Agent.update(cache_name, fn state -> Map.put(state, key, value) @@ -286,7 +604,9 @@ defmodule Cache.Sandbox do def insert_raw(cache_name, data) when is_list(data) do Agent.update(cache_name, fn state -> - Enum.reduce(data, state, fn {key, value}, acc -> + Enum.reduce(data, state, fn tuple, acc -> + key = elem(tuple, 0) + value = if tuple_size(tuple) === 2, do: elem(tuple, 1), else: tuple Map.put(acc, key, value) end) end) @@ -294,52 +614,508 @@ defmodule Cache.Sandbox do true end - def match_object(_cache_name, _pattern) do - raise "Not Implemented" + def insert_new(cache_name, data) when is_tuple(data) do + key = elem(data, 0) + value = if tuple_size(data) === 2, do: elem(data, 1), else: data + + Agent.get_and_update(cache_name, fn state -> + if Map.has_key?(state, key) do + {false, state} + else + {true, Map.put(state, key, value)} + end + end) end - def match_object(_cache_name, _pattern, _limit) do - raise "Not Implemented" + def insert_new(cache_name, data) when is_list(data) do + Agent.get_and_update(cache_name, fn state -> + keys_exist = Enum.any?(data, fn tuple -> Map.has_key?(state, elem(tuple, 0)) end) + + if keys_exist do + {false, state} + else + new_state = + 
Enum.reduce(data, state, fn tuple, acc -> + key = elem(tuple, 0) + value = if tuple_size(tuple) === 2, do: elem(tuple, 1), else: tuple + Map.put(acc, key, value) + end) + + {true, new_state} + end + end) end - def select(_cache_name, _match_spec) do - raise "Not Implemented" + def take(cache_name, key) do + Agent.get_and_update(cache_name, fn state -> + case Map.pop(state, key) do + {nil, state} -> {[], state} + {value, new_state} -> {[{key, value}], new_state} + end + end) end - def select(_cache_name, _match_spec, _limit) do - raise "Not Implemented" + def tab2list(cache_name) do + Agent.get(cache_name, fn state -> + Map.to_list(state) + end) end - def info(_cache_name) do - raise "Not Implemented" + def match_object(cache_name, pattern) do + Agent.get(cache_name, fn state -> + state + |> Map.to_list() + |> Enum.filter(fn {key, value} -> + match_pattern?({key, value}, pattern) + end) + end) end - def info(_cache_name, _item) do - raise "Not Implemented" + def match_object(cache_name, pattern, limit) do + Agent.get(cache_name, fn state -> + results = + state + |> Map.to_list() + |> Enum.filter(fn {key, value} -> + match_pattern?({key, value}, pattern) + end) + |> Enum.take(limit) + + {results, :end_of_table} + end) end - def select_delete(_cache_name, _match_spec) do - raise "Not Implemented" + def match_pattern(cache_name, pattern) do + Agent.get(cache_name, fn state -> + state + |> Map.to_list() + |> Enum.filter(fn {key, value} -> + match_pattern?({key, value}, pattern) + end) + |> Enum.map(fn obj -> extract_bindings(obj, pattern) end) + end) end - def match_delete(_cache_name, _pattern) do - raise "Not Implemented" + def match_pattern(cache_name, pattern, limit) do + Agent.get(cache_name, fn state -> + results = + state + |> Map.to_list() + |> Enum.filter(fn {key, value} -> + match_pattern?({key, value}, pattern) + end) + |> Enum.take(limit) + |> Enum.map(fn obj -> extract_bindings(obj, pattern) end) + + {results, :end_of_table} + end) + end + + defp match_pattern?(_object, :_), do: true + + defp match_pattern?(object, pattern) when is_tuple(pattern) and is_tuple(object) do + if tuple_size(object) === tuple_size(pattern) do + object_list = Tuple.to_list(object) + pattern_list = Tuple.to_list(pattern) + + object_list + |> Enum.zip(pattern_list) + |> Enum.all?(fn {obj_elem, pat_elem} -> + match_element?(obj_elem, pat_elem) + end) + else + false + end + end + + defp match_pattern?(object, pattern), do: match_element?(object, pattern) + + defp match_element?(_obj, :_), do: true + defp match_element?(_obj, pattern) when is_atom(pattern) and pattern !== :_, do: binding?(pattern) or false + defp match_element?(obj, pattern), do: obj === pattern + + defp binding?(atom) when is_atom(atom) do + atom_str = Atom.to_string(atom) + String.starts_with?(atom_str, "$") + end + + defp binding?(_), do: false + + defp extract_bindings(object, pattern) when is_tuple(pattern) and is_tuple(object) do + object_list = Tuple.to_list(object) + pattern_list = Tuple.to_list(pattern) + + object_list + |> Enum.zip(pattern_list) + |> Enum.filter(fn {_obj_elem, pat_elem} -> binding?(pat_elem) end) + |> Enum.map(fn {obj_elem, _pat_elem} -> obj_elem end) + end + + defp extract_bindings(_object, _pattern), do: [] + + def select(cache_name, match_spec) do + Agent.get(cache_name, fn state -> + state + |> Map.to_list() + |> Enum.flat_map(fn {key, value} -> + apply_match_spec({key, value}, match_spec) + end) + end) + end + + def select(cache_name, match_spec, limit) do + Agent.get(cache_name, fn state -> + results = + state 
+ |> Map.to_list() + |> Enum.flat_map(fn {key, value} -> + apply_match_spec({key, value}, match_spec) + end) + |> Enum.take(limit) + + {results, :end_of_table} + end) + end + + defp apply_match_spec(object, match_spec) when is_list(match_spec) do + Enum.flat_map(match_spec, fn spec -> + apply_single_match_spec(object, spec) + end) + end + + defp apply_single_match_spec(object, {pattern, _guards, result_spec}) do + if match_pattern?(object, pattern) do + [transform_result(object, result_spec)] + else + [] + end + end + + defp apply_single_match_spec(_object, _spec), do: [] + + defp transform_result(object, [:"$_"]), do: object + defp transform_result({key, _value}, [:"$$"]), do: [key] + defp transform_result(_object, [result]) when is_atom(result), do: result + defp transform_result(_object, result), do: result + + def select_count(cache_name, match_spec) do + Agent.get(cache_name, fn state -> + state + |> Map.to_list() + |> Enum.count(fn {key, value} -> + result = apply_match_spec({key, value}, match_spec) + result !== [] and hd(result) === true + end) + end) + end + + def select_delete(cache_name, match_spec) do + Agent.get_and_update(cache_name, fn state -> + {to_delete, to_keep} = + state + |> Map.to_list() + |> Enum.split_with(fn {key, value} -> + result = apply_match_spec({key, value}, match_spec) + result !== [] and hd(result) === true + end) + + {length(to_delete), Map.new(to_keep)} + end) + end + + def select_replace(cache_name, match_spec) do + Agent.get_and_update(cache_name, fn state -> + {count, new_state} = + state + |> Map.to_list() + |> Enum.reduce({0, state}, fn {key, value}, {cnt, acc} -> + result = apply_match_spec({key, value}, match_spec) + + case result do + [new_obj] when is_tuple(new_obj) -> + new_key = elem(new_obj, 0) + new_value = if tuple_size(new_obj) === 2, do: elem(new_obj, 1), else: new_obj + {cnt + 1, acc |> Map.delete(key) |> Map.put(new_key, new_value)} + + _ -> + {cnt, acc} + end + end) + + {count, new_state} + end) + end + + def match_delete(cache_name, pattern) do + Agent.update(cache_name, fn state -> + state + |> Map.to_list() + |> Enum.reject(fn {key, value} -> + match_pattern?({key, value}, pattern) + end) + |> Map.new() + end) + + true + end + + def info(cache_name) do + Agent.get(cache_name, fn state -> + [ + size: map_size(state), + type: :set, + named_table: true, + keypos: 1, + protection: :public + ] + end) + end + + def info(cache_name, item) do + info = info(cache_name) + Keyword.get(info, item) + end + + def slot(cache_name, i) do + Agent.get(cache_name, fn state -> + list = Map.to_list(state) + + if i >= length(list) do + :"$end_of_table" + else + [Enum.at(list, i)] + end + end) + end + + def safe_fixtable(_cache_name, _fix) do + true + end + + def init_table(cache_name, init_fun) do + Agent.update(cache_name, fn _state -> + read_init_fun(init_fun, %{}) + end) + + true + end + + defp read_init_fun(init_fun, acc) do + case init_fun.(:read) do + :end_of_input -> + acc + + objects when is_list(objects) -> + new_acc = + Enum.reduce(objects, acc, fn tuple, inner_acc -> + key = elem(tuple, 0) + value = if tuple_size(tuple) === 2, do: elem(tuple, 1), else: tuple + Map.put(inner_acc, key, value) + end) + + read_init_fun(init_fun, new_acc) + + object when is_tuple(object) -> + key = elem(object, 0) + value = if tuple_size(object) === 2, do: elem(object, 1), else: object + read_init_fun(init_fun, Map.put(acc, key, value)) + end end def to_dets(_cache_name, _dets_table) do - raise "Not Implemented" + {:error, :not_supported_in_sandbox} end def 
from_dets(_cache_name, _dets_table) do - raise "Not Implemented" + {:error, :not_supported_in_sandbox} end def to_ets(_cache_name) do - raise "Not Implemented" + {:error, :not_supported_in_sandbox} + end + + def to_ets(_cache_name, _ets_table) do + {:error, :not_supported_in_sandbox} end def from_ets(_cache_name, _ets_table) do - raise "Not Implemented" + {:error, :not_supported_in_sandbox} + end + + def close(_cache_name) do + :ok + end + + def sync(_cache_name) do + :ok + end + + def traverse(cache_name, fun) do + Agent.get_and_update(cache_name, fn state -> + {results, new_state} = + state + |> Map.to_list() + |> Enum.reduce({[], state}, fn {key, value}, {acc, current_state} -> + case fun.({key, value}) do + :continue -> + {acc, current_state} + + {:continue, result} -> + {[result | acc], current_state} + + {:done, result} -> + {[result | acc], current_state} + + :done -> + {acc, current_state} + end + end) + + {Enum.reverse(results), new_state} + end) + end + + def bchunk(_cache_name, _continuation) do + {:error, :not_supported_in_sandbox} + end + + # credo:disable-for-next-line Credo.Check.Readability.PredicateFunctionNames + def is_compatible_bchunk_format(_cache_name, _bchunk_format) do + false + end + + # credo:disable-for-next-line Credo.Check.Readability.PredicateFunctionNames + def is_dets_file(_filename) do + false + end + + def open_file(_filename) do + {:error, :not_supported_in_sandbox} + end + + def open_file(_name, _args) do + {:error, :not_supported_in_sandbox} + end + + def pid2name(_pid) do + :undefined + end + + def repair_continuation(continuation, _match_spec) do + continuation + end + + def file2tab(_filename) do + {:error, :not_supported_in_sandbox} + end + + def file2tab(_filename, _options) do + {:error, :not_supported_in_sandbox} + end + + def tab2file(_cache_name, _filename) do + {:error, :not_supported_in_sandbox} + end + + def tab2file(_cache_name, _filename, _options) do + {:error, :not_supported_in_sandbox} + end + + def tabfile_info(_filename) do + {:error, :not_supported_in_sandbox} + end + + def table(_cache_name) do + {:error, :not_supported_in_sandbox} + end + + def table(_cache_name, _options) do + {:error, :not_supported_in_sandbox} + end + + def give_away(_cache_name, _pid, _gift_data) do + {:error, :not_supported_in_sandbox} + end + + def rename(_cache_name, _name) do + {:error, :not_supported_in_sandbox} + end + + def setopts(_cache_name, _opts) do + {:error, :not_supported_in_sandbox} + end + + def whereis(_cache_name) do + :undefined + end + + def test_ms(tuple, match_spec) do + :ets.test_ms(tuple, match_spec) + end + + def match_spec_compile(match_spec) do + :ets.match_spec_compile(match_spec) + end + + def match_spec_run(list, compiled_match_spec) do + :ets.match_spec_run(list, compiled_match_spec) + end + + # credo:disable-for-next-line Credo.Check.Readability.PredicateFunctionNames + def is_compiled_ms(term) do + :ets.is_compiled_ms(term) + end + + def update_element(cache_name, key, element_spec) do + Agent.get_and_update(cache_name, fn state -> + case Map.get(state, key) do + nil -> + {false, state} + + value when is_tuple(value) -> + new_value = apply_element_spec(value, element_spec) + {true, Map.put(state, key, new_value)} + + _ -> + {false, state} + end + end) + end + + def update_element(cache_name, key, element_spec, default) do + Agent.get_and_update(cache_name, fn state -> + case Map.get(state, key) do + nil -> + {true, Map.put(state, key, default)} + + value when is_tuple(value) -> + new_value = apply_element_spec(value, 
element_spec) + {true, Map.put(state, key, new_value)} + + _ -> + {false, state} + end + end) + end + + defp apply_element_spec(tuple, {pos, value}) do + put_elem(tuple, pos - 1, value) + end + + defp apply_element_spec(tuple, specs) when is_list(specs) do + Enum.reduce(specs, tuple, fn {pos, value}, acc -> + put_elem(acc, pos - 1, value) + end) + end + + def select_reverse(cache_name, match_spec) do + result = select(cache_name, match_spec) + Enum.reverse(result) + end + + def select_reverse(cache_name, match_spec, limit) do + {results, continuation} = select(cache_name, match_spec, limit) + {Enum.reverse(results), continuation} end def smembers(_cache_name, _key, _opts) do @@ -350,14 +1126,6 @@ defmodule Cache.Sandbox do raise "Not Implemented" end - defp put_hash_field_values(state, key, fields_values) do - Map.update( - state, - key, - Map.new(fields_values), - &Enum.reduce(fields_values, &1, fn {field, value}, acc -> Map.put(acc, field, value) end) - ) - end defp check_key_exists(state, key) do if Map.has_key?(state, key) do @@ -396,6 +1164,66 @@ defmodule Cache.Sandbox do |> is_integer() end + defp apply_scan_count(values, nil), do: values + defp apply_scan_count(values, count) when is_integer(count), do: Enum.take(values, count) + + defp scan_key(key) do + key + |> to_string() + |> String.split(":", parts: 2) + |> case do + [_prefix, rest] -> rest + [value] -> value + end + end + + defp scan_match?(value, pattern) do + value + |> to_string() + |> then(&Regex.match?(scan_pattern_regex(pattern), &1)) + end + + defp scan_type_match?(_value, nil), do: true + defp scan_type_match?(value, "hash"), do: is_map(value) + defp scan_type_match?(value, "list"), do: is_list(value) + defp scan_type_match?(_value, "string"), do: true + defp scan_type_match?(_value, _type), do: false + + defp scan_pattern_regex(pattern) do + pattern + |> Regex.escape() + |> String.replace("\\*", ".*") + |> String.replace("\\?", ".") + |> then(&("^" <> &1 <> "$")) + |> Regex.compile!() + end + + defp json_path_parts(path) when path in [nil, ["."]] do + [] + end + + defp json_path_parts(path) when is_list(path) do + path + |> JSON.serialize_path() + |> String.split(".") + end + + defp json_path_parts(path) when is_binary(path) do + String.split(path, ".") + end + + defp json_path_string(path) when path in [nil, ["."]] do + "" + end + + defp json_path_string(path) when is_list(path) do + JSON.serialize_path(path) + end + + defp json_path_string(path) when is_binary(path) do + path + end + defp serialize_path_and_get_value(cache_name, key, path) do path = JSON.serialize_path(path) diff --git a/test/cache/ets_test.exs b/test/cache/ets_test.exs index f1b5d60..edfa4be 100644 --- a/test/cache/ets_test.exs +++ b/test/cache/ets_test.exs @@ -129,6 +129,65 @@ defmodule Cache.ETSTest do end end + describe "rehydration_path" do + defmodule RehydrateTestCache do + use Cache, + adapter: Cache.ETS, + name: :rehydrate_test_cache, + opts: [rehydration_path: "/tmp/ets_test"] + end + + defmodule NewTableTestCache do + use Cache, + adapter: Cache.ETS, + name: :new_table_test_cache, + opts: [rehydration_path: "/tmp/ets_test"] + end + + test "rehydrates from file on startup" do + File.mkdir_p!("/tmp/ets_test") + on_exit(fn -> File.rm_rf("/tmp/ets_test") end) + + encoded_value = Cache.TermEncoder.encode("persisted_value", nil) + + :ets.new(:rehydrate_test_cache, [:set, :public, :named_table]) + :ets.insert(:rehydrate_test_cache, {:persisted_key, encoded_value}) + :ets.tab2file(:rehydrate_test_cache, 
~c"/tmp/ets_test/rehydrate_test_cache.ets") + :ets.delete(:rehydrate_test_cache) + + start_supervised!( + %{ + id: :rehydrate_cache_sup, + type: :supervisor, + start: {Cache, :start_link, [[RehydrateTestCache], [name: :rehydrate_cache_sup]]} + } + ) + + Process.sleep(100) + + assert {:ok, "persisted_value"} === RehydrateTestCache.get(:persisted_key) + end + + test "creates new table when no file exists" do + File.mkdir_p!("/tmp/ets_test") + on_exit(fn -> File.rm_rf("/tmp/ets_test") end) + + start_supervised!( + %{ + id: :new_table_cache_sup, + type: :supervisor, + start: {Cache, :start_link, [[NewTableTestCache], [name: :new_table_cache_sup]]} + } + ) + + Process.sleep(100) + + assert {:ok, nil} === NewTableTestCache.get(:nonexistent_key) + assert :ok === NewTableTestCache.put(:new_key, "new_value") + assert {:ok, "new_value"} === NewTableTestCache.get(:new_key) + end + end + if function_exported?(:dets, :to_ets, 1) do describe "to_dets/2 and from_dets/2" do test "converts between ETS and DETS tables" do diff --git a/test/cache/redis_hash_test.exs b/test/cache/redis_hash_test.exs index b1abe5b..7da6fab 100644 --- a/test/cache/redis_hash_test.exs +++ b/test/cache/redis_hash_test.exs @@ -127,7 +127,7 @@ defmodule Cache.RedisHashTest do describe "&hash_set/4" do test "sets a field and updates key with a ttl", %{test: test} do test_key = test_key(test, "key") - {:ok, [1, 1]} = RedisCache.hash_set(test_key, :field, "value", 1) + {:ok, [1, 1]} = RedisCache.hash_set(test_key, :field, "value", :timer.seconds(1)) end end @@ -176,8 +176,8 @@ defmodule Cache.RedisHashTest do end test "sets many keys with expiries", %{set_1: set_1, set_2: set_2} do - assert {:ok, [2, 1]} = RedisCache.hash_set_many([set_1], 1) - assert {:ok, [0, 2, 1, 1]} = RedisCache.hash_set_many([set_1, set_2], 1) + assert {:ok, [2, 1]} = RedisCache.hash_set_many([set_1], :timer.seconds(1)) + assert {:ok, [0, 2, 1, 1]} = RedisCache.hash_set_many([set_1, set_2], :timer.seconds(1)) end end