==> dalli-3.2.8/.github/dependabot.yml <==
version: 2
updates:
  - package-ecosystem: "github-actions"
    directory: "/"
    schedule:
      interval: "weekly"

==> dalli-3.2.8/.github/workflows/codeql-analysis.yml <==
# For most projects, this workflow file will not need changing; you simply need
# to commit it to your repository.
#
# You may wish to alter this file to override the set of languages analyzed,
# or to provide custom queries or build logic.
#
# ******** NOTE ********
# We have attempted to detect the languages in your repository. Please check
# the `language` matrix defined below to confirm you have the correct set of
# supported CodeQL languages.
#
name: "CodeQL"
on:
  push:
    branches: [ main ]
  pull_request:
    # The branches below must be a subset of the branches above
    branches: [ main ]
  schedule:
    - cron: '22 14 * * 5'

jobs:
  analyze:
    name: Analyze
    runs-on: ubuntu-latest
    permissions:
      actions: read
      contents: read
      security-events: write

    strategy:
      fail-fast: false
      matrix:
        language: [ 'ruby' ]

    steps:
      - name: Checkout repository
        uses: actions/checkout@v4

      # Initializes the CodeQL tools for scanning.
      - name: Initialize CodeQL
        uses: github/codeql-action/init@v3
        with:
          languages: ${{ matrix.language }}
          # If you wish to specify custom queries, you can do so here or in a config file.
          # By default, queries listed here will override any specified in a config file.
          # Prefix the list here with "+" to use these queries and those in the config file.
          # queries: ./path/to/local/query, your-org/your-repo/queries@main

      # Autobuild attempts to build any compiled languages (C/C++, C#, or Java).
      # If this step fails, then you should remove it and run the build manually (see below)
      - name: Autobuild
        uses: github/codeql-action/autobuild@v3

      # ℹ️ Command-line programs to run using the OS shell.
      # 📚 https://git.io/JvXDl

      # ✏️ If the Autobuild fails above, remove it and uncomment the following three lines
      #    and modify them (or add more) to build your code if your project
      #    uses a compiled language

      #- run: |
      #   make bootstrap
      #   make release

      - name: Perform CodeQL Analysis
        uses: github/codeql-action/analyze@v3

==> dalli-3.2.8/.github/workflows/rubocop.yml <==
name: RuboCop
on: [push, pull_request]
jobs:
  build:
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v4
      - name: Set up Ruby
        uses: ruby/setup-ruby@v1
        with:
          ruby-version: 2.6
          bundler-cache: true # 'bundle install' and cache
      - name: Run RuboCop
        run: bundle exec rubocop --parallel

==> dalli-3.2.8/.github/workflows/tests.yml <==
name: Tests
on: [push, pull_request]
jobs:
  test:
    runs-on: ubuntu-20.04
    strategy:
      fail-fast: false
      matrix:
        ruby-version:
          - head
          - '3.3'
          - '3.2'
          - '3.1'
          - '3.0'
          - '2.7'
          - '2.6'
          - jruby-9.3
          - jruby-9.4
        memcached-version: ['1.5.22', '1.6.23']
    steps:
      - uses: actions/checkout@v4
      - name: Install Memcached ${{ matrix.memcached-version }}
        working-directory: scripts
        env:
          MEMCACHED_VERSION: ${{ matrix.memcached-version }}
        run: |
          chmod +x ./install_memcached.sh
          ./install_memcached.sh
      - name: Set up Ruby ${{ matrix.ruby-version }}
        uses: ruby/setup-ruby@v1
        with:
          ruby-version: ${{ matrix.ruby-version }}
          bundler-cache: true # 'bundle install' and cache
      - name: Run tests
        run: bundle exec rake
        env:
          RUN_SASL_TESTS: 1

==> dalli-3.2.8/.gitignore <==
*.gem
*.rbc
/.config
/coverage/
/InstalledFiles
/pkg/
/spec/reports/
/test/tmp/
/test/version_tmp/
/tmp/
## Specific to RubyMotion:
.dat*
.repl_history
build/
## Documentation cache and generated files:
/.yardoc/
/_yardoc/
/doc/
/html/
/rdoc/
profile.html
## Environment normalisation:
/.bundle/
/lib/bundler/man/
# for a library or gem, you might want to ignore these files since the code is
# intended to run in multiple environments; otherwise, check them in:
Gemfile.lock
gemfiles/*.lock
.ruby-version
.ruby-gemset
# unless supporting rvm < 1.11.0 or doing something fancy, ignore this:
.rvmrc

==> dalli-3.2.8/.rubocop.yml <==
inherit_from: .rubocop_todo.yml
require:
  - rubocop-minitest
  - rubocop-performance
  - rubocop-rake

AllCops:
  NewCops: enable
  TargetRubyVersion: 2.6

Metrics/BlockLength:
  Max: 50
  Exclude:
    - 'test/**/*'

Style/Documentation:
  Exclude:
    - 'test/**/*'

==> dalli-3.2.8/.rubocop_todo.yml <==
# This configuration was generated by
# `rubocop --auto-gen-config`
# on 2022-01-02 05:40:35 UTC using RuboCop version 1.24.1.
# The point is for the user to remove these configuration records
# one by one as the offenses are removed from the code base.
# Note that changes in the inspected code, or installation of new
# versions of RuboCop, may require this file to be generated again.
# Offense count: 1
# Configuration parameters: IgnoredMethods, CountRepeatedAttributes.
Metrics/AbcSize:
  Max: 19
# Offense count: 8
# Configuration parameters: CountComments, CountAsOne.
Metrics/ClassLength:
  Max: 195
# Offense count: 4
# Configuration parameters: CountComments, CountAsOne, ExcludedMethods, IgnoredMethods.
Metrics/MethodLength:
  Exclude:
    - 'lib/dalli/pipelined_getter.rb'
    - 'lib/dalli/protocol/base.rb'
# Offense count: 1
# Configuration parameters: CountComments, CountAsOne.
Metrics/ModuleLength:
  Max: 108

==> dalli-3.2.8/.standard.yml <==
fix: false # default: false
parallel: true # default: false
ruby_version: 2.5.1 # default: RUBY_VERSION
default_ignores: false # default: true
ignore: # default: []
  - 'test/**/*':
      - Style/GlobalVars
      - Style/Semicolon

==> dalli-3.2.8/3.0-Upgrade.md <==
# Dalli 3.0
This major version update contains several backwards incompatible changes.
* **:dalli_store** has been removed. Users should migrate to the
  official Rails **:mem_cache_store**, documented in the [caching
  guide](https://guides.rubyonrails.org/caching_with_rails.html#activesupport-cache-memcachestore).
  A configuration sketch follows this list.
* Attempting to store a larger value than allowed by memcached used to
print a warning and truncate the value. This now raises an error to
prevent silent data corruption.
* Compression now defaults to `true` for large values (greater than 4KB).
This is intended to minimize errors due to the previous note.
* Errors marshalling values now raise rather than just printing an error.
* The Rack session adapter has been refactored to remove support for thread-unsafe
configurations. You will need to include the `connection_pool` gem in
your Gemfile to ensure session operations are thread-safe.
* Support for the `kgio` gem has been removed; it is not relevant in Ruby 2.3+.
* Inline native code has been removed; Ruby 2.3+'s built-in `bsearch` support is used instead.
* The CAS operations previously in 'dalli/cas/client' have been
integrated into 'dalli/client'.
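
A minimal sketch of the :mem_cache_store migration and the `connection_pool` requirement described
above (the gem list, hostnames, and environment file are illustrative, not prescriptive):

```ruby
# Gemfile
gem 'dalli'
gem 'connection_pool' # required for thread-safe Rack/Rails session usage

# config/environments/production.rb -- hostnames are placeholders
config.cache_store = :mem_cache_store, 'cache-1.example.com', 'cache-2.example.com'
```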
## Future Directions
The memcached project has deprecated the binary protocol used by Dalli
in favor of a new `meta/text` protocol that is somewhat human readable.
Dalli 4.0 will move in this direction and require memcached 1.6+.
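
Dalli 3.2 already exposes the meta protocol as an opt-in via the `:protocol` client option, so you
can try it ahead of 4.0 (the server address below is a placeholder):

```ruby
client = Dalli::Client.new('localhost:11211', protocol: :meta)
```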

==> dalli-3.2.8/CHANGELOG.md <==
Dalli Changelog
=====================
Unreleased
==========
3.2.8
==========
- Handle IO::TimeoutError when establishing connection (eugeneius)
- Drop dependency on base64 gem (Earlopain)
- Address incompatibility with resolv-replace (y9v)
- Add rubygems.org metadata (m-nakamura145)
3.2.7
==========
- Fix cascading error when there's an underlying network error in a pipelined get (eugeneius)
- Ruby 3.4/head compatibility by adding base64 to gemspec (tagliala)
- Add Ruby 3.3 to CI (m-nakamura145)
- Use Socket's connect_timeout when available, and pass timeout to the socket's send and receive timeouts (mlarraz)
3.2.6
==========
- Rescue IO::TimeoutError raised by Ruby since 3.2.0 on blocking reads/writes (skaes)
- Fix rubydoc link (JuanitoFatas)
3.2.5
==========
- Better handle memcached requests being interrupted by Thread#raise or Thread#kill (byroot)
- Unexpected errors are no longer treated as `Dalli::NetworkError`, including errors raised by `Timeout.timeout` (byroot)
3.2.4
==========
- Cache PID calls for performance since glibc no longer caches in recent versions (byroot)
- Preallocate the read buffer in Socket#readfull (byroot)
3.2.3
==========
- Sanitize CAS inputs to ensure additional commands are not passed to memcached (xhzeem / petergoldstein)
- Sanitize input to flush command to ensure additional commands are not passed to memcached (xhzeem / petergoldstein)
- Namespaces passed as procs are now evaluated every time, as opposed to just on initialization (nrw505)
- Fix missing require of uri in ServerConfigParser (adam12)
- Fix link to the CHANGELOG.md file in README.md (rud)
3.2.2
==========
- Ensure apps are resilient against old session ids (kbrock)
3.2.1
==========
- Fix null replacement bug on some SASL-authenticated services (veritas1)
3.2.0
==========
- BREAKING CHANGE: Remove protocol_implementation client option (petergoldstein)
- Add protocol option with meta implementation (petergoldstein)
3.1.6
==========
- Fix bug with cas/cas! with "Not found" value (petergoldstein)
- Add Ruby 3.1 to CI (petergoldstein)
- Replace reject(&:nil?) with compact (petergoldstein)
3.1.5
==========
- Fix bug with get_cas key with "Not found" value (petergoldstein)
- Replace should return nil, not raise error, on miss (petergoldstein)
3.1.4
==========
- Improve response parsing performance (byroot)
- Reorganize binary protocol parsing a bit (petergoldstein)
- Fix handling of non-ASCII keys in get_multi (petergoldstein)
3.1.3
==========
- Restore falsey behavior on delete/delete_cas for nonexistent key (petergoldstein)
3.1.2
==========
- Make quiet? / multi? public on Dalli::Protocol::Binary (petergoldstein)
3.1.1
==========
- Add quiet support for incr, decr, append, prepend, and flush (petergoldstein)
- Additional refactoring to allow reuse of connection behavior (petergoldstein)
- Fix issue in flush such that it wasn't passing the delay argument to memcached (petergoldstein)
3.1.0
==========
- BREAKING CHANGE: Update Rack::Session::Dalli to inherit from Abstract::PersistedSecure. This will invalidate existing sessions (petergoldstein)
- BREAKING CHANGE: Use of unsupported operations in a multi block now raise an error. (petergoldstein)
- Extract PipelinedGetter from Dalli::Client (petergoldstein)
- Fix SSL socket so that it works with pipelined gets (petergoldstein)
- Additional refactoring to split classes (petergoldstein)
3.0.6
==========
- Fix regression in SASL authentication response parsing (petergoldstein)
3.0.5
==========
- Add Rubocop and fix most outstanding issues (petergoldstein)
- Extract a number of classes, to simplify the largest classes (petergoldstein)
- Ensure against socket corruption if an error occurs in a multi block (petergoldstein)
3.0.4
==========
- Clean connections and retry after NetworkError in get_multi (andrejbl)
- Internal refactoring and cleanup (petergoldstein)
3.0.3
==========
- Restore ability for `compress` to be disabled on a per request basis (petergoldstein)
- Fix broken image in README (deining)
- Use bundler-cache in CI (olleolleolle)
- Remove the OpenSSL extensions dependency (petergoldstein)
- Add Memcached 1.5.x to the CI matrix
- Updated compression documentation (petergoldstein)
3.0.2
==========
- Restore Windows compatibility (petergoldstein)
- Add JRuby to CI and make requisite changes (petergoldstein)
- Clarify documentation for supported rubies (petergoldstein)
3.0.1
==========
- Fix syntax error that prevented inclusion of Dalli::Server (ryanfb)
- Restore with method required by ActiveSupport::Cache::MemCacheStore
3.0.0
==========
- BREAKING CHANGES:
* Removes :dalli_store.
Use Rails' official :mem_cache_store instead.
https://guides.rubyonrails.org/caching_with_rails.html
* Attempting to store a larger value than allowed by memcached used to
print a warning and truncate the value. This now raises an error to
prevent silent data corruption.
* Compression now defaults to `true` for large values (greater than 4KB).
This is intended to minimize errors due to the previous note.
* Errors marshalling values now raise rather than just printing an error.
* The Rack session adapter has been refactored to remove support for thread-unsafe
configurations. You will need to include the `connection_pool` gem in
your Gemfile to ensure session operations are thread-safe.
* When using namespaces, the algorithm for calculating truncated keys was
changed. Non-truncated keys and truncated keys for the non-namespace
case were left unchanged.
- Raise NetworkError when multi response gets into corrupt state (mervync, #783)
- Validate servers argument (semaperepelitsa, petergoldstein, #776)
- Enable SSL support (bdunne, #775)
- Add gat operation (tbeauvais, #769)
- Switch repo to Github Actions and upgrade Ruby versions (petergoldstein, bdunne, Fryguy)
- Update benchmark test for Rubyprof changes (nateberkopec)
- Remove support for the `kgio` gem, it is not relevant in Ruby 2.3+. (mperham)
- Remove inline native code, use Ruby 2.3+ support for bsearch instead. (mperham)
2.7.11
==========
- DEPRECATION: :dalli_store will be removed in Dalli 3.0.
Use Rails' official :mem_cache_store instead.
https://guides.rubyonrails.org/caching_with_rails.html
- Add new `digest_class` option to Dalli::Client [#724]
- Don't treat NameError as a network error [#728]
- Handle nested comma separated server strings (sambostock)
2.7.10
==========
- Revert frozen string change (schneems)
- Advertise supports_cached_versioning? in DalliStore (schneems)
- Better detection of fork support, to allow specs to run under Truffle Ruby (deepj)
- Update logging for over max size to log as error (aeroastro)
2.7.9
==========
- Fix behavior for Rails 5.2+ cache_versioning (GriwMF)
- Ensure fetch provides the key to the fallback block as an argument (0exp)
- Assorted performance improvements (schneems)
2.7.8
==========
- Rails 5.2 compatibility (pbougie)
- Fix Session Cache compatibility (pixeltrix)
2.7.7
==========
- Support large cache keys on fetch multi (sobrinho)
- Not found checks no longer trigger the result's equality method (dannyfallon)
- Use SVG build badges (olleolleolle)
- Travis updates (junaruga, tiarly, petergoldstein)
- Update default down_retry_delay (jaredhales)
- Close kgio socket after IO.select timeouts
- Documentation updates (tipair)
- Instrument DalliStore errors with instrument_errors configuration option. (btatnall)
2.7.6
==========
- Rails 5.0.0.beta2 compatibility (yui-knk, petergoldstein)
- Add cas!, a variant of the #cas method that yields to the block whether or not the key already exists (mwpastore)
- Performance improvements (nateberkopec)
- Add Ruby 2.3.0 to support matrix (tricknotes)
2.7.5
==========
- Support rcvbuff and sndbuff byte configuration. (btatnall)
- Add `:cache_nils` option to support nil values in `DalliStore#fetch` and `Dalli::Client#fetch` (wjordan, #559)
- Log retryable server errors with 'warn' instead of 'info' (phrinx)
- Fix timeout issue with Dalli::Client#get_multi_yielder (dspeterson)
- Escape namespaces with special regexp characters (Steven Peckins)
- Ensure LocalCache supports the `:raw` option and Entry unwrapping (sj26)
- Ensure bad ttl values don't cause Dalli::RingError (eagletmt, petergoldstein)
- Always pass namespaced key to instrumentation API (kaorimatz)
- Replace use of deprecated TimeoutError with Timeout::Error (eagletmt)
- Clean up gemspec, and use Bundler for loading (grosser)
- Dry up local cache testing (grosser)
2.7.4
==========
- Restore Windows compatibility (dfens, #524)
2.7.3
==========
- Assorted spec improvements
- README changes to specify defaults for failover and compress options (keen99, #470)
- SASL authentication changes to deal with Unicode characters (flypiggy, #477)
- Call to_i on ttl to accommodate ActiveSupport::Duration (#494)
- Change to implicit blocks for performance (glaucocustodio, #495)
- Change to each_key for performance (jastix, #496)
- Support stats settings - (dterei, #500)
- Raise DalliError if the hostname cannot be parsed (dannyfallon, #501)
- Fix instrumentation for falsey values (AlexRiedler, #514)
- Support UNIX socket configurations (r-stu31, #515)
2.7.2
==========
- The fix for #423 didn't make it into the released 2.7.1 gem somehow.
2.7.1
==========
- Rack session will check if servers are up on initialization (arthurnn, #423)
- Add support for IPv6 addresses in hex form, ie: "[::1]:11211" (dplummer, #428)
- Add symbol support for namespace (jingkai #431)
- Support expiration intervals longer than 30 days (leonid-shevtsov #436)
2.7.0
==========
- BREAKING CHANGE:
Dalli::Client#add and #replace now return a truthy value, not boolean true or false.
- Multithreading support with dalli\_store:
Use :pool\_size to create a pool of shared, threadsafe Dalli clients in Rails:
```ruby
config.cache_store = :dalli_store, "cache-1.example.com", "cache-2.example.com", :compress => true, :pool_size => 5, :expires_in => 300
```
This will ensure the Rails.cache singleton does not become a source of contention.
**PLEASE NOTE** Rails's :mem\_cache\_store does not support pooling as of
Rails 4.0. You must use :dalli\_store.
- Implement `version` for retrieving version of connected servers [dterei, #384]
- Implement `fetch_multi` for batched read/write [sorentwo, #380]
- Add more support for safe updates with multiple writers: [philipmw, #395]
`require 'dalli/cas/client'` augments Dalli::Client with the following methods:
* Get value with CAS: `[value, cas] = get_cas(key)`
`get_cas(key) {|value, cas| ...}`
* Get multiple values with CAS: `get_multi_cas(k1, k2, ...) {|value, metadata| cas = metadata[:cas]}`
* Set value with CAS: `new_cas = set_cas(key, value, cas, ttl, options)`
* Replace value with CAS: `replace_cas(key, new_value, cas, ttl, options)`
* Delete value with CAS: `delete_cas(key, cas)`
- Fix bug with get key with "Not found" value [uzzz, #375]
2.6.4
=======
- Fix ADD command, aka `write(unless_exist: true)` (pitr, #365)
- Upgrade test suite from mini\_shoulda to minitest.
- Even more performance improvements for get\_multi (xaop, #331)
2.6.3
=======
- Support specific stats by passing `:items` or `:slabs` to `stats` method [bukhamseen]
- Fix 'can't modify frozen String' errors in `ActiveSupport::Cache::DalliStore` [dblock]
- Protect against objects with custom equality checking [theron17]
- Warn if value for key is too large to store [locriani]
2.6.2
=======
- Properly handle missing RubyInline
2.6.1
=======
- Add optional native C binary search for ring, add:
gem 'RubyInline'
to your Gemfile to get a 10% speedup when using many servers.
You will see no improvement if you are only using one server.
- More get_multi performance optimization [xaop, #315]
- Add lambda support for cache namespaces [joshwlewis, #311]
2.6.0
=======
- read_multi optimization, now checks local_cache [chendo, #306]
- Re-implement get_multi to be non-blocking [tmm1, #295]
- Add `dalli` accessor to dalli_store to access the underlying
Dalli::Client, for things like `get_multi`.
- Add `Dalli::GzipCompressor`, primarily for compatibility with nginx's HttpMemcachedModule using `memcached_gzip_flag`
2.5.0
=======
- Don't escape non-ASCII keys, memcached binary protocol doesn't care. [#257]
- :dalli_store now implements LocalCache [#236]
- Removed lots of old session_store test code, tests now all run without a default memcached server [#275]
- Changed Dalli ActiveSupport adapter to always attempt instrumentation [brianmario, #284]
- Change write operations (add/set/replace) to return false when value is too large to store [brianmario, #283]
- Allowing different compressors per client [naseem]
2.4.0
=======
- Added the ability to swap out the compressor used to [de]compress cache data [brianmario, #276]
- Fix get\_multi performance issues with lots of memcached servers [tmm1]
- Throw more specific exceptions [tmm1]
- Allowing different types of serialization per client [naseem]
2.3.0
=======
- Added the ability to swap out the serializer used to [de]serialize cache data [brianmario, #274]
2.2.1
=======
- Fix issues with ENV-based connections. [#266]
- Fix problem with SessionStore in Rails 4.0 [#265]
2.2.0
=======
- Add Rack session with\_lock helper, for Rails 4.0 support [#264]
- Accept connection string in the form of a URL (e.g., memcached://user:pass@hostname:port) [glenngillen]
- Add touch operation [#228, uzzz]
2.1.0
=======
- Add Railtie to auto-configure Dalli when included in Gemfile [#217, steveklabnik]
2.0.5
=======
- Create proper keys for arrays of objects passed as keys [twinturbo, #211]
- Handle long key with namespace [#212]
- Add NODELAY to TCP socket options [#206]
2.0.4
=======
- Dalli no longer needs to be reset after Unicorn/Passenger fork [#208]
- Add option to re-raise errors rescued in the session and cache stores. [pitr, #200]
- DalliStore#fetch called the block if the cached value == false [#205]
- DalliStore should have accessible options [#195]
- Add silence and mute support for DalliStore [#207]
- Tracked down and fixed socket corruption due to Timeout [#146]
2.0.3
=======
- Allow proper retrieval of stored `false` values [laserlemon, #197]
- Allow non-ASCII and whitespace keys; only the text protocol has those restrictions [#145]
- Fix DalliStore#delete error-handling [#196]
2.0.2
=======
- Fix all dalli\_store operations to handle nil options [#190]
- Increment and decrement with :initial => nil now return nil (lawrencepit, #112)
2.0.1
=======
- Fix nil option handling in dalli\_store#write [#188]
2.0.0
=======
- Reimplemented the Rails' dalli\_store to remove use of
ActiveSupport::Cache::Entry which added 109 bytes overhead to every
value stored, was a performance bottleneck and duplicated a lot of
functionality already in Dalli. One benchmark went from 4.0 sec to 3.0
sec with the new dalli\_store. [#173]
- Added reset\_stats operation [#155]
- Added support for configuring keepalive on TCP connections to memcached servers (@bianster, #180)
Notes:
* data stored with dalli\_store 2.x is NOT backwards compatible with 1.x.
Upgraders are advised to namespace their keys and roll out the 2.x
upgrade slowly so keys do not clash and caches are warmed.
`config.cache_store = :dalli_store, :expires_in => 24.hours.to_i, :namespace => 'myapp2'`
* data stored with plain Dalli::Client API is unchanged.
* removed support for dalli\_store's race\_condition\_ttl option.
* removed support for em-synchrony and unix socket connection options.
* removed support for Ruby 1.8.6
* removed memcache-client compatibility layer and upgrade documentation.
1.1.5
=======
- Coerce input to incr/decr to integer via #to\_i [#165]
- Convert test suite to minitest/spec (crigor, #166)
- Fix encoding issue with keys [#162]
- Fix double namespacing with Rails and dalli\_store. [#160]
1.1.4
=======
- Use 127.0.0.1 instead of localhost as default to avoid IPv6 issues
- Extend DalliStore's :expires\_in when :race\_condition\_ttl is also used.
- Fix :expires\_in option not propagating from DalliStore to Client, GH-136
- Added support for native Rack session store. Until now, Dalli's
session store has required Rails. Now you can use Dalli to store
sessions for any Rack application.
require 'rack/session/dalli'
use Rack::Session::Dalli, :memcache_server => 'localhost:11211', :compression => true
1.1.3
=======
- Support Rails's autoloading hack for loading sessions with objects
whose classes have not been required yet, GH-129
- Support Unix sockets for connectivity. Shows a 2x performance
increase but keep in mind they only work on localhost. (dfens)
1.1.2
=======
- Fix incompatibility with latest Rack session API when destroying
sessions, thanks @twinge!
1.1.1
=======
v1.1.0 was a bad release. Yanked.
1.1.0
=======
- Remove support for Rails 2.3, add support for Rails 3.1
- Fix socket failure retry logic, now you can restart memcached and Dalli won't complain!
- Add support for fibered operation via em-synchrony (eliaslevy)
- Gracefully handle write timeouts, GH-99
- Only issue bug warning for unexpected StandardErrors, GH-102
- Add travis-ci build support (ryanlecompte)
- Gracefully handle errors in get_multi (michaelfairley)
- Misc fixes from crash2burn, fphilipe, igreg, raggi
1.0.5
=======
- Fix socket failure retry logic, now you can restart memcached and Dalli won't complain!
1.0.4
=======
- Handle non-ASCII key content in dalli_store
- Accept key array for read_multi in dalli_store
- Fix multithreaded race condition in creation of mutex
1.0.3
=======
- Better handling of application marshalling errors
- Work around jruby IO#sysread compatibility issue
1.0.2
=======
- Allow browser session cookies (blindsey)
- Compatibility fixes (mwynholds)
- Add backwards compatibility module for memcache-client, require 'dalli/memcache-client'. It makes
Dalli more compatible with memcache-client and prints out a warning any time you do something that
is no longer supported so you can fix your code.
1.0.1
=======
- Explicitly handle application marshalling bugs, GH-56
- Add support for username/password as options, to allow multiple bucket access
from the same Ruby process, GH-52
- Add support for >1MB values with :value_max_bytes option, GH-54 (r-stu31)
- Add support for default TTL, :expires_in, in Rails 2.3. (Steven Novotny)
config.cache_store = :dalli_store, 'localhost:11211', {:expires_in => 4.hours}
1.0.0
=======
Welcome gucki as a Dalli committer!
- Fix network and namespace issues in get_multi (gucki)
- Better handling of unmarshalling errors (mperham)
0.11.2
=======
- Major reworking of socket error and failover handling (gucki)
- Add basic JRuby support (mperham)
0.11.1
======
- Minor fixes, doc updates.
- Add optional support for kgio sockets, gives a 10-15% performance boost.
0.11.0
======
Warning: this release changes how Dalli marshals data. I do not guarantee compatibility until 1.0 but I will increment the minor version every time a release breaks compatibility until 1.0.
IT IS HIGHLY RECOMMENDED YOU FLUSH YOUR CACHE BEFORE UPGRADING.
- multi() now works reentrantly.
- Added new Dalli::Client option for default TTLs, :expires_in, defaults to 0 (aka forever).
- Added new Dalli::Client option, :compression, to enable auto-compression of values.
- Refactor how Dalli stores data on the server. Values are now tagged
as "marshalled" or "compressed" so they can be automatically deserialized
without the client having to know how they were stored.
0.10.1
======
- Prefer server config from environment, fixes Heroku session store issues (thanks JoshMcKin)
- Better handling of non-ASCII values (size -> bytesize)
- Assert that keys are ASCII only
0.10.0
======
Warning: this release changed how Rails marshals data with Dalli. Unfortunately previous versions double marshalled values. It is possible that data stored with previous versions of Dalli will not work with this version.
IT IS HIGHLY RECOMMENDED YOU FLUSH YOUR CACHE BEFORE UPGRADING.
- Rework how the Rails cache store does value marshalling.
- Rework old server version detection to avoid a socket read hang.
- Refactor the Rails 2.3 :dalli\_store to be closer to :mem\_cache\_store.
- Better documentation for session store config (plukevdh)
0.9.10
----
- Better server retry logic (next2you)
- Rails 3.1 compatibility (gucki)
0.9.9
----
- Add support for *_multi operations for add, set, replace and delete. This implements
pipelined network operations; Dalli disables network replies so we're not limited by
latency, allowing for much higher throughput.
    dc = Dalli::Client.new
    dc.multi do
      dc.set 'a', 1
      dc.set 'b', 2
      dc.set 'c', 3
      dc.delete 'd'
    end
- Minor fix to set the continuum sorted by value (kangster)
- Implement session store with Rails 2.3. Update docs.
0.9.8
-----
- Implement namespace support
- Misc fixes
0.9.7
-----
- Small fix for NewRelic integration.
- Detect and fail on older memcached servers (pre-1.4).
0.9.6
-----
- Patches for Rails 3.0.1 integration.
0.9.5
-----
- Major design change - raw support is back to maximize compatibility with Rails
and the increment/decrement operations. You can now pass :raw => true to most methods
to bypass (un)marshalling.
- Support symbols as keys (ddollar)
- Rails 2.3 bug fixes
0.9.4
-----
- Dalli support now in rack-bug (http://github.com/brynary/rack-bug), give it a try!
- Namespace support for Rails 2.3 (bpardee)
- Bug fixes
0.9.3
-----
- Rails 2.3 support (beanieboi)
- Rails SessionStore support
- Passenger integration
- memcache-client upgrade docs, see Upgrade.md
0.9.2
----
- Verify proper operation in Heroku.
0.9.1
----
- Add fetch and cas operations (mperham)
- Add incr and decr operations (mperham)
- Initial support for SASL authentication via the MEMCACHE_{USERNAME,PASSWORD} environment variables, needed for Heroku (mperham)
0.9.0
-----
- Initial gem release.

==> dalli-3.2.8/Gemfile <==
# frozen_string_literal: true
source 'https://rubygems.org'
gemspec
group :development, :test do
  gem 'connection_pool'
  gem 'minitest', '~> 5'
  gem 'rack', '~> 2.0', '>= 2.2.0'
  gem 'rake', '~> 13.0'
  gem 'rubocop'
  gem 'rubocop-minitest'
  gem 'rubocop-performance'
  gem 'rubocop-rake'
  gem 'simplecov'
end

group :test do
  gem 'ruby-prof', platform: :mri
end

==> dalli-3.2.8/LICENSE <==
Copyright (c) Peter M. Goldstein, Mike Perham
Permission is hereby granted, free of charge, to any person obtaining
a copy of this software and associated documentation files (the
"Software"), to deal in the Software without restriction, including
without limitation the rights to use, copy, modify, merge, publish,
distribute, sublicense, and/or sell copies of the Software, and to
permit persons to whom the Software is furnished to do so, subject to
the following conditions:
The above copyright notice and this permission notice shall be
included in all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.

==> dalli-3.2.8/Performance.md <==
Performance
====================
Caching is all about performance, so I carefully track Dalli performance to ensure no regressions.
Earlier versions of Dalli could optionally use the kgio gem for a 10-20% performance boost; kgio support was removed in Dalli 3.0, so the kgio numbers below are historical.
Note that I've added some benchmarks to Dalli over time that the other libraries don't necessarily have.
memcache-client
---------------
Testing 1.8.5 with ruby 1.9.3p0 (2011-10-30 revision 33570) [x86_64-darwin11.2.0]
user system total real
set:plain:memcache-client 1.860000 0.310000 2.170000 ( 2.188030)
set:ruby:memcache-client 1.830000 0.290000 2.120000 ( 2.130212)
get:plain:memcache-client 1.830000 0.340000 2.170000 ( 2.176156)
get:ruby:memcache-client 1.900000 0.330000 2.230000 ( 2.235045)
multiget:ruby:memcache-client 0.860000 0.120000 0.980000 ( 0.987348)
missing:ruby:memcache-client 1.630000 0.320000 1.950000 ( 1.954867)
mixed:ruby:memcache-client 3.690000 0.670000 4.360000 ( 4.364469)
dalli
-----
Testing with Rails 3.2.1
Using kgio socket IO
Testing 2.0.0 with ruby 1.9.3p125 (2012-02-16 revision 34643) [x86_64-darwin11.3.0]
user system total real
mixed:rails:dalli 1.580000 0.570000 2.150000 ( 3.008839)
set:plain:dalli 0.730000 0.300000 1.030000 ( 1.567098)
setq:plain:dalli 0.520000 0.120000 0.640000 ( 0.634402)
set:ruby:dalli 0.800000 0.300000 1.100000 ( 1.640348)
get:plain:dalli 0.840000 0.330000 1.170000 ( 1.668425)
get:ruby:dalli 0.850000 0.330000 1.180000 ( 1.665716)
multiget:ruby:dalli 0.700000 0.260000 0.960000 ( 0.965423)
missing:ruby:dalli 0.720000 0.320000 1.040000 ( 1.511720)
mixed:ruby:dalli 1.660000 0.640000 2.300000 ( 3.320743)
mixedq:ruby:dalli 1.630000 0.510000 2.140000 ( 2.629734)
incr:ruby:dalli 0.270000 0.100000 0.370000 ( 0.547618)

==> dalli-3.2.8/README.md <==
Dalli [](https://github.com/petergoldstein/dalli/actions/workflows/tests.yml)
=====
Dalli is a high performance pure Ruby client for accessing memcached servers.
Dalli supports:
* Simple and complex memcached configurations
* Failover between memcached instances
* Fine-grained control of data serialization and compression
* Thread-safe operation (either through use of a connection pool, or by using the Dalli client in threadsafe mode)
* SSL/TLS connections to memcached
* SASL authentication
The name is a variant of Salvador Dali for his famous painting [The Persistence of Memory](http://en.wikipedia.org/wiki/The_Persistence_of_Memory).
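
A minimal usage sketch (the server address and options are placeholders; see the wiki below for full configuration details):

```ruby
require 'dalli'

dc = Dalli::Client.new('localhost:11211', namespace: 'myapp', compress: true)
dc.set('abc', 123)
dc.get('abc') # => 123
```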

## Documentation and Information
* [User Documentation](https://github.com/petergoldstein/dalli/wiki) - The documentation is maintained in the repository's wiki.
* [Announcements](https://github.com/petergoldstein/dalli/discussions/categories/announcements) - Announcements of interest to the Dalli community will be posted here.
* [Bug Reports](https://github.com/petergoldstein/dalli/issues) - If you discover a problem with Dalli, please submit a bug report in the tracker.
* [Forum](https://github.com/petergoldstein/dalli/discussions/categories/q-a) - If you have questions about Dalli, please post them here.
* [Client API](https://www.rubydoc.info/gems/dalli) - Ruby documentation for the `Dalli::Client` API
## Development
After checking out the repo, run `bin/setup` to install dependencies. You can run `bin/console` for an interactive prompt that will allow you to experiment.
To install this gem onto your local machine, run `bundle exec rake install`.
## Contributing
If you have a fix you wish to provide, please fork the code, apply the fix in your local project, and then send a pull request on GitHub. Please ensure that you include a test that verifies your fix, and update the [changelog](CHANGELOG.md) with a one-sentence description of your fix so you get credit as a contributor.
## Appreciation
Dalli would not exist in its current form without the contributions of many people. But special thanks go to several individuals and organizations:
* Mike Perham - for originally authoring the Dalli project and serving as maintainer and primary contributor for many years
* Eric Wong - for help using his [kgio](http://bogomips.org/kgio/) library.
* Brian Mitchell - for his remix-stash project which was helpful when implementing and testing the binary protocol support.
* [CouchBase](http://couchbase.com) - for their sponsorship of the original development
## Authors
* [Peter M. Goldstein](https://github.com/petergoldstein) - current maintainer
* [Mike Perham](https://github.com/mperham) and contributors
## Copyright
Copyright (c) Mike Perham, Peter M. Goldstein. See LICENSE for details.

==> dalli-3.2.8/Rakefile <==
# frozen_string_literal: true
require 'bundler/gem_tasks'
require 'rake/testtask'
Rake::TestTask.new(:test) do |test|
  test.pattern = 'test/**/test_*.rb'
  test.warning = true
  test.verbose = true
end

task default: :test

Rake::TestTask.new(:bench) do |test|
  test.pattern = 'test/benchmark_test.rb'
end

==> dalli-3.2.8/bin/console <==
#!/usr/bin/env ruby
# frozen_string_literal: true
require 'bundler/setup'
require 'dalli'
# You can add fixtures and/or initialization code here to make experimenting
# with your gem easier. You can also use a different console, if you like.
# (If you use this, don't forget to add pry to your Gemfile!)
# require "pry"
# Pry.start
require 'irb'
IRB.start(__FILE__)

==> dalli-3.2.8/bin/setup <==
#!/usr/bin/env bash
set -euo pipefail
IFS=$'\n\t'
set -vx
bundle install
# Do any other automated setup that you need to do here

==> dalli-3.2.8/code_of_conduct.md <==
# Contributor Code of Conduct
As contributors and maintainers of this project, and in the interest of
fostering an open and welcoming community, we pledge to respect all people who
contribute through reporting issues, posting feature requests, updating
documentation, submitting pull requests or patches, and other activities.
We are committed to making participation in this project a harassment-free
experience for everyone, regardless of level of experience, gender, gender
identity and expression, sexual orientation, disability, personal appearance,
body size, race, ethnicity, age, religion, or nationality.
Examples of unacceptable behavior by participants include:
* The use of sexualized language or imagery
* Personal attacks
* Trolling or insulting/derogatory comments
* Public or private harassment
* Publishing others' private information, such as physical or electronic
addresses, without explicit permission
* Other unethical or unprofessional conduct
Project maintainers have the right and responsibility to remove, edit, or
reject comments, commits, code, wiki edits, issues, and other contributions
that are not aligned to this Code of Conduct, or to ban temporarily or
permanently any contributor for other behaviors that they deem inappropriate,
threatening, offensive, or harmful.
By adopting this Code of Conduct, project maintainers commit themselves to
fairly and consistently applying these principles to every aspect of managing
this project. Project maintainers who do not follow or enforce the Code of
Conduct may be permanently removed from the project team.
This Code of Conduct applies both within project spaces and in public spaces
when an individual is representing the project or its community.
Instances of abusive, harassing, or otherwise unacceptable behavior may be
reported by contacting the project maintainer at peter.m.goldstein AT gmail.com. All
complaints will be reviewed and investigated and will result in a response that
is deemed necessary and appropriate to the circumstances. Maintainers are
obligated to maintain confidentiality with regard to the reporter of an
incident.
This Code of Conduct is adapted from the [Contributor Covenant][homepage],
version 1.3.0, available at
[http://contributor-covenant.org/version/1/3/0/][version]
[homepage]: http://contributor-covenant.org
[version]: http://contributor-covenant.org/version/1/3/0/

==> dalli-3.2.8/dalli.gemspec <==
# frozen_string_literal: true
require './lib/dalli/version'
Gem::Specification.new do |s|
  s.name = 'dalli'
  s.version = Dalli::VERSION
  s.license = 'MIT'

  s.authors = ['Peter M. Goldstein', 'Mike Perham']
  s.description = s.summary = 'High performance memcached client for Ruby'
  s.email = ['peter.m.goldstein@gmail.com', 'mperham@gmail.com']
  s.files = Dir.glob('lib/**/*') + [
    'LICENSE',
    'README.md',
    'CHANGELOG.md',
    'Gemfile'
  ]
  s.homepage = 'https://github.com/petergoldstein/dalli'
  s.required_ruby_version = '>= 2.6'

  s.metadata = {
    'bug_tracker_uri' => 'https://github.com/petergoldstein/dalli/issues',
    'changelog_uri' => 'https://github.com/petergoldstein/dalli/blob/main/CHANGELOG.md',
    'rubygems_mfa_required' => 'true'
  }
end

==> dalli-3.2.8/lib/dalli.rb <==
# frozen_string_literal: true
##
# Namespace for all Dalli code.
##
module Dalli
  autoload :Server, 'dalli/server'

  # generic error
  class DalliError < RuntimeError; end
  # socket/server communication error
  class NetworkError < DalliError; end
  # no server available/alive error
  class RingError < DalliError; end
  # application error in marshalling serialization
  class MarshalError < DalliError; end
  # application error in marshalling deserialization or decompression
  class UnmarshalError < DalliError; end
  # payload too big for memcached
  class ValueOverMaxSize < DalliError; end
  # operation is not permitted in a multi block
  class NotPermittedMultiOpError < DalliError; end

  # Implements the NullObject pattern to store an application-defined value for 'Key not found' responses.
  class NilObject; end # rubocop:disable Lint/EmptyClass
  NOT_FOUND = NilObject.new

  QUIET = :dalli_multi

  def self.logger
    @logger ||= rails_logger || default_logger
  end

  def self.rails_logger
    (defined?(Rails) && Rails.respond_to?(:logger) && Rails.logger) ||
      (defined?(RAILS_DEFAULT_LOGGER) && RAILS_DEFAULT_LOGGER.respond_to?(:debug) && RAILS_DEFAULT_LOGGER)
  end

  def self.default_logger
    require 'logger'
    l = Logger.new($stdout)
    l.level = Logger::INFO
    l
  end

  def self.logger=(logger)
    @logger = logger
  end
end
require_relative 'dalli/version'
require_relative 'dalli/compressor'
require_relative 'dalli/client'
require_relative 'dalli/key_manager'
require_relative 'dalli/pipelined_getter'
require_relative 'dalli/ring'
require_relative 'dalli/protocol'
require_relative 'dalli/protocol/base'
require_relative 'dalli/protocol/binary'
require_relative 'dalli/protocol/connection_manager'
require_relative 'dalli/protocol/meta'
require_relative 'dalli/protocol/response_buffer'
require_relative 'dalli/protocol/server_config_parser'
require_relative 'dalli/protocol/ttl_sanitizer'
require_relative 'dalli/protocol/value_compressor'
require_relative 'dalli/protocol/value_marshaller'
require_relative 'dalli/protocol/value_serializer'
require_relative 'dalli/servers_arg_normalizer'
require_relative 'dalli/socket'
require_relative 'dalli/options'

==> dalli-3.2.8/lib/dalli/cas/client.rb <==
# frozen_string_literal: true
puts "You can remove `require 'dalli/cas/client'` as this code has been rolled into the standard 'dalli/client'."

==> dalli-3.2.8/lib/dalli/client.rb <==
# frozen_string_literal: true
require 'digest/md5'
require 'set'
# encoding: ascii
module Dalli
##
# Dalli::Client is the main class which developers will use to interact with
# Memcached.
##
class Client
##
# Dalli::Client is the main class which developers will use to interact with
# the memcached server. Usage:
#
# Dalli::Client.new(['localhost:11211:10',
# 'cache-2.example.com:11211:5',
# '192.168.0.1:22122:5',
# '/var/run/memcached/socket'],
# failover: true, expires_in: 300)
#
# servers is an Array of "host:port:weight" where weight allows you to distribute cache unevenly.
# Both weight and port are optional. If you pass in nil, Dalli will use the MEMCACHE_SERVERS
# environment variable or default to 'localhost:11211' if it is not present. Dalli also supports
# the ability to connect to Memcached on localhost through a UNIX socket. To use this functionality,
# use a full pathname (beginning with a slash character '/') in place of the "host:port" pair in
# the server configuration.
#
# Options:
# - :namespace - prepend each key with this value to provide simple namespacing.
# - :failover - if a server is down, look for and store values on another server in the ring. Default: true.
# - :threadsafe - ensure that only one thread is actively using a socket at a time. Default: true.
# - :expires_in - default TTL in seconds if you do not pass TTL as a parameter to an individual operation, defaults
# to 0 or forever.
# - :compress - if true Dalli will compress values larger than compression_min_size bytes before sending them
# to memcached. Default: true.
# - :compression_min_size - the minimum size (in bytes) for which Dalli will compress values sent to Memcached.
# Defaults to 4K.
# - :serializer - defaults to Marshal
# - :compressor - defaults to Dalli::Compressor, a Zlib-based implementation
# - :cache_nils - defaults to false, if true Dalli will not treat cached nil values as 'not found' for
# #fetch operations.
# - :digest_class - defaults to Digest::MD5, allows you to pass in an object that responds to the hexdigest method,
# useful for injecting a FIPS compliant hash object.
# - :protocol - one of either :binary or :meta, defaulting to :binary. This sets the protocol that Dalli uses
# to communicate with memcached.
#
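# For instance (values are illustrative, not defaults):
#   Dalli::Client.new('localhost:11211', namespace: 'myapp', compress: true, protocol: :meta)
#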
def initialize(servers = nil, options = {})
@normalized_servers = ::Dalli::ServersArgNormalizer.normalize_servers(servers)
@options = normalize_options(options)
@key_manager = ::Dalli::KeyManager.new(@options)
@ring = nil
end
#
# The standard memcached instruction set
#
##
# Get the value associated with the key.
# If a value is not found, then +nil+ is returned.
def get(key, req_options = nil)
perform(:get, key, req_options)
end
##
# Gat (get and touch) fetch an item and simultaneously update its expiration time.
#
# If a value is not found, then +nil+ is returned.
def gat(key, ttl = nil)
perform(:gat, key, ttl_or_default(ttl))
end
##
# Touch updates expiration time for a given key.
#
# Returns true if key exists, otherwise nil.
def touch(key, ttl = nil)
resp = perform(:touch, key, ttl_or_default(ttl))
resp.nil? ? nil : true
end
##
# Get the value and CAS ID associated with the key. If a block is provided,
# value and CAS will be passed to the block.
def get_cas(key)
(value, cas) = perform(:cas, key)
return [value, cas] unless block_given?
yield value, cas
end
##
# Fetch multiple keys efficiently.
# If a block is given, yields key/value pairs one at a time.
# Otherwise returns a hash of { 'key' => 'value', 'key2' => 'value2' }
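#
# A usage sketch (client construction, keys, and values are illustrative):
#   dc = Dalli::Client.new('localhost:11211')
#   dc.get_multi('a', 'b')                    # => { 'a' => 1, 'b' => 2 }
#   dc.get_multi('a', 'b') { |k, v| puts "#{k}=#{v}" }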
def get_multi(*keys)
keys.flatten!
keys.compact!
return {} if keys.empty?
if block_given?
pipelined_getter.process(keys) { |k, data| yield k, data.first }
else
{}.tap do |hash|
pipelined_getter.process(keys) { |k, data| hash[k] = data.first }
end
end
end
##
# Fetch multiple keys efficiently, including available metadata such as CAS.
# If a block is given, yields key/data pairs one at a time. Data is an array:
# [value, cas_id]
# If no block is given, returns a hash of
# { 'key' => [value, cas_id] }
def get_multi_cas(*keys)
if block_given?
pipelined_getter.process(keys) { |*args| yield(*args) }
else
{}.tap do |hash|
pipelined_getter.process(keys) { |k, data| hash[k] = data }
end
end
end
# Fetch the value associated with the key.
# If a value is found, then it is returned.
#
# If a value is not found and no block is given, then nil is returned.
#
# If a value is not found (or if the found value is nil and :cache_nils is false)
# and a block is given, the block will be invoked and its return value
# written to the cache and returned.
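#
# A usage sketch (the key, ttl, and block are illustrative; `expensive_lookup`
# is a hypothetical helper):
#   dc.fetch('user:42', 300) { expensive_lookup } # block runs only on a cache miss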
def fetch(key, ttl = nil, req_options = nil)
req_options = req_options.nil? ? CACHE_NILS : req_options.merge(CACHE_NILS) if cache_nils
val = get(key, req_options)
return val unless block_given? && not_found?(val)
new_val = yield
add(key, new_val, ttl_or_default(ttl), req_options)
new_val
end
##
# compare and swap values using optimistic locking.
# Fetch the existing value for key.
# If it exists, yield the value to the block.
# Add the block's return value as the new value for the key.
# Add will fail if someone else changed the value.
#
# Returns:
# - nil if the key did not exist.
# - false if the value was changed by someone else.
# - true if the value was successfully updated.
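#
# A usage sketch (key and transformation are illustrative):
#   dc.cas('counters') { |value| value.merge('hits' => value['hits'] + 1) }
#   # => nil if the key was missing, false if another writer won, true on success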
def cas(key, ttl = nil, req_options = nil, &block)
cas_core(key, false, ttl, req_options, &block)
end
##
# like #cas, but will yield to the block whether or not the value
# already exists.
#
# Returns:
# - false if the value was changed by someone else.
# - true if the value was successfully updated.
def cas!(key, ttl = nil, req_options = nil, &block)
cas_core(key, true, ttl, req_options, &block)
end
##
# Turn on quiet aka noreply support for a number of
# memcached operations.
#
# All relevant operations within this block will be effectively
# pipelined as Dalli will use 'quiet' versions. The invoked methods
# will all return nil, rather than their usual response. Method
# latency will be substantially lower, as the caller will not be
# blocking on responses.
#
# Currently supports storage (set, add, replace, append, prepend),
# arithmetic (incr, decr), flush and delete operations. Use of
# unsupported operations inside a block will raise an error.
#
# Any error replies will be discarded at the end of the block, and
# Dalli client methods invoked inside the block will not
# have return values
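#
# A usage sketch (keys and values are illustrative):
#   dc.quiet do
#     dc.set('a', 1)   # returns nil; the write is pipelined
#     dc.set('b', 2)
#     dc.delete('old')
#   end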
def quiet
old = Thread.current[::Dalli::QUIET]
Thread.current[::Dalli::QUIET] = true
yield
ensure
@ring&.pipeline_consume_and_ignore_responses
Thread.current[::Dalli::QUIET] = old
end
alias multi quiet
def set(key, value, ttl = nil, req_options = nil)
set_cas(key, value, 0, ttl, req_options)
end
##
# Set the key-value pair, verifying existing CAS.
# Returns the resulting CAS value if succeeded, and falsy otherwise.
def set_cas(key, value, cas, ttl = nil, req_options = nil)
perform(:set, key, value, ttl_or_default(ttl), cas, req_options)
end
##
# Conditionally add a key/value pair, if the key does not already exist
# on the server. Returns truthy if the operation succeeded.
def add(key, value, ttl = nil, req_options = nil)
perform(:add, key, value, ttl_or_default(ttl), req_options)
end
##
# Conditionally add a key/value pair, only if the key already exists
# on the server. Returns truthy if the operation succeeded.
def replace(key, value, ttl = nil, req_options = nil)
replace_cas(key, value, 0, ttl, req_options)
end
##
# Conditionally add a key/value pair, verifying existing CAS, only if the
# key already exists on the server. Returns the new CAS value if the
# operation succeeded, or falsy otherwise.
def replace_cas(key, value, cas, ttl = nil, req_options = nil)
perform(:replace, key, value, ttl_or_default(ttl), cas, req_options)
end
# Delete a key/value pair, verifying existing CAS.
# Returns true if succeeded, and falsy otherwise.
def delete_cas(key, cas = 0)
perform(:delete, key, cas)
end
def delete(key)
delete_cas(key, 0)
end
##
# Append value to the value already stored on the server for 'key'.
# Appending only works for values stored with :raw => true.
def append(key, value)
perform(:append, key, value.to_s)
end
##
# Prepend value to the value already stored on the server for 'key'.
# Prepending only works for values stored with :raw => true.
def prepend(key, value)
perform(:prepend, key, value.to_s)
end
##
# Incr adds the given amount to the counter on the memcached server.
# Amt must be a positive integer value.
#
# If default is nil, the counter must already exist or the operation
# will fail and will return nil. Otherwise this method will return
# the new value for the counter.
#
# Note that the ttl will only apply if the counter does not already
# exist. To increase an existing counter and update its TTL, use
# #cas.
#
# If the value already exists, it must have been set with raw: true
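#
# A usage sketch (key names are illustrative):
#   dc.set('pageviews', '0', nil, raw: true)
#   dc.incr('pageviews')             # => 1
#   dc.incr('visitors', 1, nil, 10)  # initializes a missing counter from the default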
def incr(key, amt = 1, ttl = nil, default = nil)
check_positive!(amt)
perform(:incr, key, amt.to_i, ttl_or_default(ttl), default)
end
##
# Decr subtracts the given amount from the counter on the memcached server.
# Amt must be a positive integer value.
#
# memcached counters are unsigned and cannot hold negative values. Calling
# decr on a counter which is 0 will just return 0.
#
# If default is nil, the counter must already exist or the operation
# will fail and will return nil. Otherwise this method will return
# the new value for the counter.
#
# Note that the ttl will only apply if the counter does not already
# exist. To decrease an existing counter and update its TTL, use
# #cas.
#
# If the value already exists, it must have been set with raw: true
def decr(key, amt = 1, ttl = nil, default = nil)
check_positive!(amt)
perform(:decr, key, amt.to_i, ttl_or_default(ttl), default)
end
##
# Flush the memcached server, at 'delay' seconds in the future.
# Delay defaults to zero seconds, which means an immediate flush.
##
def flush(delay = 0)
ring.servers.map { |s| s.request(:flush, delay) }
end
alias flush_all flush
ALLOWED_STAT_KEYS = %i[items slabs settings].freeze
##
# Collect the stats for each server.
# You can optionally pass a type including :items, :slabs or :settings to get specific stats
# Returns a hash like { 'hostname:port' => { 'stat1' => 'value1', ... }, 'hostname2:port' => { ... } }
def stats(type = nil)
type = nil unless ALLOWED_STAT_KEYS.include? type
values = {}
ring.servers.each do |server|
values[server.name.to_s] = server.alive? ? server.request(:stats, type.to_s) : nil
end
values
end
##
# Reset stats for each server.
def reset_stats
ring.servers.map do |server|
server.alive? ? server.request(:reset_stats) : nil
end
end
##
## Version of the memcache servers.
def version
values = {}
ring.servers.each do |server|
values[server.name.to_s] = server.alive? ? server.request(:version) : nil
end
values
end
##
## Make sure memcache servers are alive, or raise a Dalli::RingError
def alive!
ring.server_for_key('')
end
##
# Close our connection to each server.
# If you perform another operation after this, the connections will be re-established.
def close
@ring&.close
@ring = nil
end
alias reset close
CACHE_NILS = { cache_nils: true }.freeze
def not_found?(val)
cache_nils ? val == ::Dalli::NOT_FOUND : val.nil?
end
def cache_nils
@options[:cache_nils]
end
# Stub method so a bare Dalli client can pretend to be a connection pool.
def with
yield self
end
private
def check_positive!(amt)
raise ArgumentError, "Positive values only: #{amt}" if amt.negative?
end
def cas_core(key, always_set, ttl = nil, req_options = nil)
(value, cas) = perform(:cas, key)
return if value.nil? && !always_set
newvalue = yield(value)
perform(:set, key, newvalue, ttl_or_default(ttl), cas, req_options)
end
##
# Uses the argument TTL or the client-wide default. Ensures
# that the value is an integer
##
def ttl_or_default(ttl)
(ttl || @options[:expires_in]).to_i
rescue NoMethodError
raise ArgumentError, "Cannot convert ttl (#{ttl}) to an integer"
end
def ring
@ring ||= Dalli::Ring.new(@normalized_servers, protocol_implementation, @options)
end
def protocol_implementation
@protocol_implementation ||= case @options[:protocol]&.to_s
when 'meta'
Dalli::Protocol::Meta
else
Dalli::Protocol::Binary
end
end
##
# Chokepoint method for memcached methods with a key argument.
# Validates the key, resolves the key to the appropriate server
# instance, and invokes the memcached method on the appropriate
# server.
#
# This method also forces retries on network errors - when
# a particular memcached instance becomes unreachable, or the
# operation times out.
##
def perform(*all_args)
return yield if block_given?
op, key, *args = all_args
key = key.to_s
key = @key_manager.validate_key(key)
server = ring.server_for_key(key)
server.request(op, key, *args)
rescue NetworkError => e
Dalli.logger.debug { e.inspect }
Dalli.logger.debug { 'retrying request with new server' }
retry
end
def normalize_options(opts)
opts[:expires_in] = opts[:expires_in].to_i if opts[:expires_in]
opts
rescue NoMethodError
raise ArgumentError, "cannot convert :expires_in => #{opts[:expires_in].inspect} to an integer"
end
def pipelined_getter
PipelinedGetter.new(ring, @key_manager)
end
end
end
dalli-3.2.8/lib/dalli/compressor.rb
# frozen_string_literal: true
require 'zlib'
require 'stringio'
module Dalli
##
# Default compressor used by Dalli, that uses
# Zlib DEFLATE to compress data.
##
class Compressor
def self.compress(data)
Zlib::Deflate.deflate(data)
end
def self.decompress(data)
Zlib::Inflate.inflate(data)
end
end
##
# Alternate compressor for Dalli, that uses
# Gzip. Gzip adds a checksum to each compressed
# entry.
##
class GzipCompressor
def self.compress(data)
io = StringIO.new(+'', 'w')
gz = Zlib::GzipWriter.new(io)
gz.write(data)
gz.close
io.string
end
def self.decompress(data)
io = StringIO.new(data, 'rb')
Zlib::GzipReader.new(io).read
end
end
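# Illustrative sketch, assuming the :compressor client option is used to
# swap in this implementation (the server address is an example only):
#
#   Dalli::Client.new('localhost:11211', compressor: Dalli::GzipCompressor)
#
# Values written with one compressor cannot be read back with the other, so
# the setting should be consistent across clients sharing a cache.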
end
dalli-3.2.8/lib/dalli/key_manager.rb
# frozen_string_literal: true
require 'digest/md5'
module Dalli
##
# This class manages and validates keys sent to Memcached, ensuring
# that they meet Memcached key length requirements, and supporting
# the implementation of optional namespaces on a per-Dalli client
# basis.
##
class KeyManager
MAX_KEY_LENGTH = 250
NAMESPACE_SEPARATOR = ':'
# This is a hard coded md5 for historical reasons
TRUNCATED_KEY_SEPARATOR = ':md5:'
# This is 249 for historical reasons
TRUNCATED_KEY_TARGET_SIZE = 249
DEFAULTS = {
digest_class: ::Digest::MD5
}.freeze
OPTIONS = %i[digest_class namespace].freeze
attr_reader :namespace
def initialize(client_options)
@key_options =
DEFAULTS.merge(client_options.select { |k, _| OPTIONS.include?(k) })
validate_digest_class_option(@key_options)
@namespace = namespace_from_options
end
##
# Validates the key, and transforms as needed.
#
# If the key is nil or empty, raises ArgumentError. Whitespace
# characters are allowed for historical reasons, but likely shouldn't
# be used.
# If the key (with namespace) is shorter than the memcached maximum
# allowed key length, just returns the argument key
# Otherwise computes a "truncated" key that uses a truncated prefix
# combined with a 32-byte hex digest of the whole key.
##
def validate_key(key)
raise ArgumentError, 'key cannot be blank' unless key&.length&.positive?
key = key_with_namespace(key)
key.length > MAX_KEY_LENGTH ? truncated_key(key) : key
end
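# Illustrative sketch of the behavior above (namespace and keys are examples):
#
#   km = Dalli::KeyManager.new(namespace: 'app')
#   km.validate_key('user:1')   # => "app:user:1"
#   km.validate_key('x' * 300)  # => "app:xxx...:md5:<32-char hex digest>",
#                               #    truncated to fit the 250-byte limit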
##
# Returns the key with the namespace prefixed, if a namespace is
# defined. Otherwise just returns the key
##
def key_with_namespace(key)
return key if namespace.nil?
"#{evaluate_namespace}#{NAMESPACE_SEPARATOR}#{key}"
end
def key_without_namespace(key)
return key if namespace.nil?
key.sub(namespace_regexp, '')
end
def digest_class
@digest_class ||= @key_options[:digest_class]
end
def namespace_regexp
return /\A#{Regexp.escape(evaluate_namespace)}:/ if namespace.is_a?(Proc)
@namespace_regexp ||= /\A#{Regexp.escape(namespace)}:/.freeze unless namespace.nil?
end
def validate_digest_class_option(opts)
return if opts[:digest_class].respond_to?(:hexdigest)
raise ArgumentError, 'The digest_class object must respond to the hexdigest method'
end
def namespace_from_options
raw_namespace = @key_options[:namespace]
return nil unless raw_namespace
return raw_namespace.to_s unless raw_namespace.is_a?(Proc)
raw_namespace
end
def evaluate_namespace
return namespace.call.to_s if namespace.is_a?(Proc)
namespace
end
##
# Produces a truncated key, if the raw key is longer than the maximum allowed
# length. The truncated key is produced by generating a hex digest
# of the key, and appending that to a truncated section of the key.
##
def truncated_key(key)
digest = digest_class.hexdigest(key)
"#{key[0, prefix_length(digest)]}#{TRUNCATED_KEY_SEPARATOR}#{digest}"
end
def prefix_length(digest)
return TRUNCATED_KEY_TARGET_SIZE - (TRUNCATED_KEY_SEPARATOR.length + digest.length) if namespace.nil?
# For historical reasons, truncated keys with namespaces had a length of 250 rather
# than 249
TRUNCATED_KEY_TARGET_SIZE + 1 - (TRUNCATED_KEY_SEPARATOR.length + digest.length)
end
end
end
dalli-3.2.8/lib/dalli/options.rb
# frozen_string_literal: true
require 'monitor'
module Dalli
# Make Dalli threadsafe by using a lock around all
# public server methods.
#
# Dalli::Protocol::Binary.extend(Dalli::Threadsafe)
#
module Threadsafe
def self.extended(obj)
obj.init_threadsafe
end
def request(opcode, *args)
@lock.synchronize do
super
end
end
def alive?
@lock.synchronize do
super
end
end
def close
@lock.synchronize do
super
end
end
def pipeline_response_setup
@lock.synchronize do
super
end
end
def pipeline_next_responses
@lock.synchronize do
super
end
end
def pipeline_abort
@lock.synchronize do
super
end
end
def lock!
@lock.mon_enter
end
def unlock!
@lock.mon_exit
end
def init_threadsafe
@lock = Monitor.new
end
end
end
dalli-3.2.8/lib/dalli/pid_cache.rb
# frozen_string_literal: true
module Dalli
##
# Dalli::PIDCache is a wrapper class for PID checking to avoid system calls when checking the PID.
##
module PIDCache
if !Process.respond_to?(:fork) # JRuby or TruffleRuby
@pid = Process.pid
singleton_class.attr_reader(:pid)
elsif Process.respond_to?(:_fork) # Ruby 3.1+
class << self
attr_reader :pid
def update!
@pid = Process.pid
end
end
update!
##
# Dalli::PIDCache::CoreExt hooks into Process to be able to reset the PID cache after fork
##
module CoreExt
def _fork
child_pid = super
PIDCache.update! if child_pid.zero?
child_pid
end
end
Process.singleton_class.prepend(CoreExt)
else # Ruby 3.0 or older
class << self
def pid
Process.pid
end
end
end
end
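# Illustrative sketch of how a caller can use the cache to detect a fork
# without a system call on every check (reconnect! is a hypothetical helper):
#
#   pid_at_setup = Dalli::PIDCache.pid
#   # ... later, possibly in a forked child ...
#   reconnect! if Dalli::PIDCache.pid != pid_at_setup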
end
dalli-3.2.8/lib/dalli/pipelined_getter.rb
# frozen_string_literal: true
module Dalli
##
# Contains logic for the pipelined gets implemented by the client.
##
class PipelinedGetter
def initialize(ring, key_manager)
@ring = ring
@key_manager = key_manager
end
##
# Yields, one at a time, keys and their values+attributes.
#
def process(keys, &block)
return {} if keys.empty?
@ring.lock do
servers = setup_requests(keys)
start_time = Time.now
servers = fetch_responses(servers, start_time, @ring.socket_timeout, &block) until servers.empty?
end
rescue NetworkError => e
Dalli.logger.debug { e.inspect }
Dalli.logger.debug { 'retrying pipelined gets because of timeout' }
retry
end
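# Illustrative sketch of how the client consumes this class (ring and
# key_manager come from the owning Dalli::Client; key names are examples):
#
#   getter = Dalli::PipelinedGetter.new(ring, key_manager)
#   getter.process(%w[a b c]) do |key, (value, cas)|
#     puts "#{key} => #{value.inspect} (cas #{cas})"
#   end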
def setup_requests(keys)
groups = groups_for_keys(keys)
make_getkq_requests(groups)
# TODO: How does this exit on a NetworkError
finish_queries(groups.keys)
end
##
# Loop through the server-grouped sets of keys, writing
# the corresponding getkq requests to the appropriate servers
#
# It's worth noting that we could potentially reduce bytes
# on the wire by switching from getkq to getq, and using
# the opaque value to match requests to responses.
##
def make_getkq_requests(groups)
groups.each do |server, keys_for_server|
server.request(:pipelined_get, keys_for_server)
rescue DalliError, NetworkError => e
Dalli.logger.debug { e.inspect }
Dalli.logger.debug { "unable to get keys for server #{server.name}" }
end
end
##
# This loops through the servers that have keys in
# our set, sending the noop to terminate the set of queries.
##
def finish_queries(servers)
deleted = []
servers.each do |server|
next unless server.connected?
begin
finish_query_for_server(server)
rescue Dalli::NetworkError
raise
rescue Dalli::DalliError
deleted.append(server)
end
end
servers.delete_if { |server| deleted.include?(server) }
rescue Dalli::NetworkError
abort_without_timeout(servers)
raise
end
def finish_query_for_server(server)
server.pipeline_response_setup
rescue Dalli::NetworkError
raise
rescue Dalli::DalliError => e
Dalli.logger.debug { e.inspect }
Dalli.logger.debug { "Results from server: #{server.name} will be missing from the results" }
raise
end
# Swallows Dalli::NetworkError
def abort_without_timeout(servers)
servers.each(&:pipeline_abort)
end
def fetch_responses(servers, start_time, timeout, &block)
# Remove any servers which are not connected
servers.delete_if { |s| !s.connected? }
return [] if servers.empty?
time_left = remaining_time(start_time, timeout)
readable_servers = servers_with_response(servers, time_left)
if readable_servers.empty?
abort_with_timeout(servers)
return []
end
# Loop through the servers with responses, and
# delete any from our list that are finished
readable_servers.each do |server|
servers.delete(server) if process_server(server, &block)
end
servers
rescue NetworkError
# Abort and raise if we encountered a network error. This triggers
# a retry at the top level.
abort_without_timeout(servers)
raise
end
def remaining_time(start, timeout)
elapsed = Time.now - start
return 0 if elapsed > timeout
timeout - elapsed
end
# Swallows Dalli::NetworkError
def abort_with_timeout(servers)
abort_without_timeout(servers)
servers.each do |server|
Dalli.logger.debug { "memcached at #{server.name} did not response within timeout" }
end
true # Required to simplify caller
end
# Processes responses from a server. Returns true if there are no
# additional responses from this server.
def process_server(server)
server.pipeline_next_responses.each_pair do |key, value_list|
yield @key_manager.key_without_namespace(key), value_list
end
server.pipeline_complete?
end
def servers_with_response(servers, timeout)
return [] if servers.empty?
# TODO: - This is a bit challenging. Essentially the PipelinedGetter
# is a reactor, but without the benefit of a Fiber or separate thread.
# My suspicion is that we may want to try and push this down into the
# individual servers, but I'm not sure. For now, we keep the
# mapping between the alerted object (the socket) and the
# corresponding server here.
server_map = servers.each_with_object({}) { |s, h| h[s.sock] = s }
readable, = IO.select(server_map.keys, nil, nil, timeout)
return [] if readable.nil?
readable.map { |sock| server_map[sock] }
end
def groups_for_keys(*keys)
keys.flatten!
keys.map! { |a| @key_manager.validate_key(a.to_s) }
groups = @ring.keys_grouped_by_server(keys)
if (unfound_keys = groups.delete(nil))
Dalli.logger.debug do
"unable to get keys for #{unfound_keys.length} keys " \
'because no matching server was found'
end
end
groups
end
end
end
dalli-3.2.8/lib/dalli/protocol.rb
# frozen_string_literal: true
require 'timeout'
module Dalli
module Protocol
# Preserved for backwards compatibility. Should be removed in 4.0
NOT_FOUND = ::Dalli::NOT_FOUND
# Ruby 3.2 raises IO::TimeoutError on blocking reads/writes, but
# it is not defined in earlier Ruby versions.
TIMEOUT_ERRORS =
if defined?(IO::TimeoutError)
[Timeout::Error, IO::TimeoutError]
else
[Timeout::Error]
end
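# Illustrative sketch of the intended use: splat the constant into a rescue
# clause so the same code handles both Ruby versions (socket_read and
# handle_timeout are hypothetical):
#
#   begin
#     socket_read(bytes)
#   rescue *Dalli::Protocol::TIMEOUT_ERRORS => e
#     handle_timeout(e)
#   end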
end
end
dalli-3.2.8/lib/dalli/protocol/base.rb
# frozen_string_literal: true
require 'forwardable'
require 'socket'
require 'timeout'
module Dalli
module Protocol
##
# Base class for a single Memcached server, containing logic common to all
# protocols. Contains logic for managing connection state to the server and value
# handling.
##
class Base
extend Forwardable
attr_accessor :weight, :options
def_delegators :@value_marshaller, :serializer, :compressor, :compression_min_size, :compress_by_default?
def_delegators :@connection_manager, :name, :sock, :hostname, :port, :close, :connected?, :socket_timeout,
:socket_type, :up!, :down!, :write, :reconnect_down_server?, :raise_down_error
def initialize(attribs, client_options = {})
hostname, port, socket_type, @weight, user_creds = ServerConfigParser.parse(attribs)
@options = client_options.merge(user_creds)
@value_marshaller = ValueMarshaller.new(@options)
@connection_manager = ConnectionManager.new(hostname, port, socket_type, @options)
end
# Chokepoint method for error handling and ensuring liveness
def request(opkey, *args)
verify_state(opkey)
begin
@connection_manager.start_request!
response = send(opkey, *args)
# pipelined_get emits the query but doesn't read the response(s)
@connection_manager.finish_request! unless opkey == :pipelined_get
response
rescue Dalli::MarshalError => e
log_marshal_err(args.first, e)
raise
rescue Dalli::DalliError
raise
rescue StandardError => e
log_unexpected_err(e)
close
raise
end
end
##
# Boolean method used by clients of this class to determine if this
# particular memcached instance is available for use.
def alive?
ensure_connected!
rescue Dalli::NetworkError
# ensure_connected! raises a NetworkError if connection fails. We
# want to capture that error and convert it to a boolean value here.
false
end
def lock!; end
def unlock!; end
# Start reading key/value pairs from this connection. This is usually called
# after a series of GETKQ commands. A NOOP is sent, and the server begins
# flushing responses for kv pairs that were found.
#
# Returns nothing.
def pipeline_response_setup
verify_pipelined_state(:getkq)
write_noop
response_buffer.reset
end
# Attempt to receive and parse as many key/value pairs as possible
# from this server. After #pipeline_response_setup, this should be invoked
# repeatedly whenever this server's socket is readable until
# #pipeline_complete?.
#
# Returns a Hash of kv pairs received.
def pipeline_next_responses
reconnect_on_pipeline_complete!
values = {}
response_buffer.read
status, cas, key, value = response_buffer.process_single_getk_response
# status is not nil only if we have a full response to parse
# in the buffer
until status.nil?
# If the status is ok and key is nil, then this is the response
# to the noop at the end of the pipeline
finish_pipeline && break if status && key.nil?
# If the status is ok and the key is not nil, then this is a
# getkq response with a value that we want to set in the response hash
values[key] = [value, cas] unless key.nil?
# Get the next response from the buffer
status, cas, key, value = response_buffer.process_single_getk_response
end
values
rescue SystemCallError, *TIMEOUT_ERRORS, EOFError => e
@connection_manager.error_on_request!(e)
end
# Abort current pipelined get. Generally used to signal an external
# timeout during pipelined get. The underlying socket is
# disconnected, and the exception is swallowed.
#
# Returns nothing.
def pipeline_abort
response_buffer.clear
@connection_manager.abort_request!
return true unless connected?
# Closes the connection, which ensures that our connection
# is in a clean state for future requests
@connection_manager.error_on_request!('External timeout')
rescue NetworkError
true
end
# Returns true once the current pipelined get has been fully consumed,
# i.e. the response buffer is no longer in progress.
def pipeline_complete?
!response_buffer.in_progress?
end
def username
@options[:username] || ENV.fetch('MEMCACHE_USERNAME', nil)
end
def password
@options[:password] || ENV.fetch('MEMCACHE_PASSWORD', nil)
end
def require_auth?
!username.nil?
end
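# Illustrative sketch: credentials may come from client options or from the
# environment (values are examples only):
#
#   ENV['MEMCACHE_USERNAME'] = 'app'
#   ENV['MEMCACHE_PASSWORD'] = 'secret'
#   Dalli::Client.new('cache.example.com:11211') # will SASL-authenticate as 'app'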
def quiet?
Thread.current[::Dalli::QUIET]
end
alias multi? quiet?
# NOTE: Additional public methods should be overridden in Dalli::Threadsafe
private
ALLOWED_QUIET_OPS = %i[add replace set delete incr decr append prepend flush noop].freeze
def verify_allowed_quiet!(opkey)
return if ALLOWED_QUIET_OPS.include?(opkey)
raise Dalli::NotPermittedMultiOpError, "The operation #{opkey} is not allowed in a quiet block."
end
##
# Checks to see if we can execute the specified operation. Checks
# whether the connection is in use, and whether the command is allowed
##
def verify_state(opkey)
@connection_manager.confirm_ready!
verify_allowed_quiet!(opkey) if quiet?
# The ensure_connected call has the side effect of connecting the
# underlying socket if it is not connected, or there's been a disconnect
# because of timeout or other error. Method raises an error
# if it can't connect
raise_down_error unless ensure_connected!
end
def verify_pipelined_state(_opkey)
@connection_manager.confirm_in_progress!
raise_down_error unless connected?
end
# The socket connection to the underlying server is initialized as a side
# effect of this call. In fact, this is the ONLY place where that
# socket connection is initialized.
#
# Both this method and connect need to be in this class so we can do auth
# as required
#
# Since this is invoked exclusively in verify_state, we don't need to worry about
# thread safety. Using it elsewhere may require revisiting that assumption.
def ensure_connected!
return true if connected?
return false unless reconnect_down_server?
connect # This call needs to be in this class so we can do auth
connected?
end
def cache_nils?(opts)
return false unless opts.is_a?(Hash)
opts[:cache_nils] ? true : false
end
def connect
@connection_manager.establish_connection
authenticate_connection if require_auth?
@version = version # Connect socket if not authed
up!
end
def pipelined_get(keys)
req = +''
keys.each do |key|
req << quiet_get_request(key)
end
# Could send noop here instead of in pipeline_response_setup
write(req)
end
def response_buffer
@response_buffer ||= ResponseBuffer.new(@connection_manager, response_processor)
end
# Called after the noop response is received at the end of a set
# of pipelined gets
def finish_pipeline
response_buffer.clear
@connection_manager.finish_request!
true # to simplify response
end
def reconnect_on_pipeline_complete!
@connection_manager.reconnect! 'pipelined get has completed' if pipeline_complete?
end
def log_marshal_err(key, err)
Dalli.logger.error "Marshalling error for key '#{key}': #{err.message}"
Dalli.logger.error 'You are trying to cache a Ruby object which cannot be serialized to memcached.'
end
def log_unexpected_err(err)
Dalli.logger.error "Unexpected exception during Dalli request: #{err.class.name}: #{err.message}"
Dalli.logger.error err.backtrace.join("\n\t")
end
end
end
end
dalli-3.2.8/lib/dalli/protocol/binary.rb
# frozen_string_literal: true
require 'forwardable'
require 'socket'
require 'timeout'
module Dalli
module Protocol
##
# Access point for a single Memcached server, accessed via Memcached's binary
# protocol. Contains logic for managing connection state to the server (retries, etc),
# formatting requests to the server, and unpacking responses.
##
class Binary < Base
def response_processor
@response_processor ||= ResponseProcessor.new(@connection_manager, @value_marshaller)
end
private
# Retrieval Commands
def get(key, options = nil)
req = RequestFormatter.standard_request(opkey: :get, key: key)
write(req)
response_processor.get(cache_nils: cache_nils?(options))
end
def quiet_get_request(key)
RequestFormatter.standard_request(opkey: :getkq, key: key)
end
def gat(key, ttl, options = nil)
ttl = TtlSanitizer.sanitize(ttl)
req = RequestFormatter.standard_request(opkey: :gat, key: key, ttl: ttl)
write(req)
response_processor.get(cache_nils: cache_nils?(options))
end
def touch(key, ttl)
ttl = TtlSanitizer.sanitize(ttl)
write(RequestFormatter.standard_request(opkey: :touch, key: key, ttl: ttl))
response_processor.generic_response
end
# TODO: This is confusing, as there's a cas command in memcached
# and this isn't it. Maybe rename? Maybe eliminate?
def cas(key)
req = RequestFormatter.standard_request(opkey: :get, key: key)
write(req)
response_processor.data_cas_response
end
# Storage Commands
def set(key, value, ttl, cas, options)
opkey = quiet? ? :setq : :set
storage_req(opkey, key, value, ttl, cas, options)
end
def add(key, value, ttl, options)
opkey = quiet? ? :addq : :add
storage_req(opkey, key, value, ttl, 0, options)
end
def replace(key, value, ttl, cas, options)
opkey = quiet? ? :replaceq : :replace
storage_req(opkey, key, value, ttl, cas, options)
end
# rubocop:disable Metrics/ParameterLists
def storage_req(opkey, key, value, ttl, cas, options)
(value, bitflags) = @value_marshaller.store(key, value, options)
ttl = TtlSanitizer.sanitize(ttl)
req = RequestFormatter.standard_request(opkey: opkey, key: key,
value: value, bitflags: bitflags,
ttl: ttl, cas: cas)
write(req)
response_processor.storage_response unless quiet?
end
# rubocop:enable Metrics/ParameterLists
def append(key, value)
opkey = quiet? ? :appendq : :append
write_append_prepend opkey, key, value
end
def prepend(key, value)
opkey = quiet? ? :prependq : :prepend
write_append_prepend opkey, key, value
end
def write_append_prepend(opkey, key, value)
write(RequestFormatter.standard_request(opkey: opkey, key: key, value: value))
response_processor.no_body_response unless quiet?
end
# Delete Commands
def delete(key, cas)
opkey = quiet? ? :deleteq : :delete
req = RequestFormatter.standard_request(opkey: opkey, key: key, cas: cas)
write(req)
response_processor.delete unless quiet?
end
# Arithmetic Commands
def decr(key, count, ttl, initial)
opkey = quiet? ? :decrq : :decr
decr_incr opkey, key, count, ttl, initial
end
def incr(key, count, ttl, initial)
opkey = quiet? ? :incrq : :incr
decr_incr opkey, key, count, ttl, initial
end
# This allows us to special-case a nil initial value and
# handle it differently than a zero. This special expiry
# value causes memcached to return a 'not found' response
# if the key doesn't already exist, rather than setting
# the initial value.
NOT_FOUND_EXPIRY = 0xFFFFFFFF
def decr_incr(opkey, key, count, ttl, initial)
expiry = initial ? TtlSanitizer.sanitize(ttl) : NOT_FOUND_EXPIRY
initial ||= 0
write(RequestFormatter.decr_incr_request(opkey: opkey, key: key,
count: count, initial: initial, expiry: expiry))
response_processor.decr_incr unless quiet?
end
# Other Commands
def flush(ttl = 0)
opkey = quiet? ? :flushq : :flush
write(RequestFormatter.standard_request(opkey: opkey, ttl: ttl))
response_processor.no_body_response unless quiet?
end
# Noop is a keepalive operation but also used to demarcate the end of a set of pipelined commands.
# We need to read all the responses at once.
def noop
write_noop
response_processor.consume_all_responses_until_noop
end
def stats(info = '')
req = RequestFormatter.standard_request(opkey: :stat, key: info)
write(req)
response_processor.stats
end
def reset_stats
write(RequestFormatter.standard_request(opkey: :stat, key: 'reset'))
response_processor.reset
end
def version
write(RequestFormatter.standard_request(opkey: :version))
response_processor.version
end
def write_noop
req = RequestFormatter.standard_request(opkey: :noop)
write(req)
end
require_relative 'binary/request_formatter'
require_relative 'binary/response_header'
require_relative 'binary/response_processor'
require_relative 'binary/sasl_authentication'
include SaslAuthentication
end
end
end
dalli-3.2.8/lib/dalli/protocol/binary/request_formatter.rb
# frozen_string_literal: true
module Dalli
module Protocol
class Binary
##
# Class that encapsulates logic for formatting binary protocol requests
# to memcached.
##
class RequestFormatter
REQUEST = 0x80
OPCODES = {
get: 0x00,
set: 0x01,
add: 0x02,
replace: 0x03,
delete: 0x04,
incr: 0x05,
decr: 0x06,
flush: 0x08,
noop: 0x0A,
version: 0x0B,
getkq: 0x0D,
append: 0x0E,
prepend: 0x0F,
stat: 0x10,
setq: 0x11,
addq: 0x12,
replaceq: 0x13,
deleteq: 0x14,
incrq: 0x15,
decrq: 0x16,
flushq: 0x18,
appendq: 0x19,
prependq: 0x1A,
touch: 0x1C,
gat: 0x1D,
auth_negotiation: 0x20,
auth_request: 0x21,
auth_continue: 0x22
}.freeze
REQ_HEADER_FORMAT = 'CCnCCnNNQ'
KEY_ONLY = 'a*'
TTL_AND_KEY = 'Na*'
KEY_AND_VALUE = 'a*a*'
INCR_DECR = 'NNNNNa*'
TTL_ONLY = 'N'
NO_BODY = ''
BODY_FORMATS = {
get: KEY_ONLY,
getkq: KEY_ONLY,
delete: KEY_ONLY,
deleteq: KEY_ONLY,
stat: KEY_ONLY,
append: KEY_AND_VALUE,
prepend: KEY_AND_VALUE,
appendq: KEY_AND_VALUE,
prependq: KEY_AND_VALUE,
auth_request: KEY_AND_VALUE,
auth_continue: KEY_AND_VALUE,
set: 'NNa*a*',
setq: 'NNa*a*',
add: 'NNa*a*',
addq: 'NNa*a*',
replace: 'NNa*a*',
replaceq: 'NNa*a*',
incr: INCR_DECR,
decr: INCR_DECR,
incrq: INCR_DECR,
decrq: INCR_DECR,
flush: TTL_ONLY,
flushq: TTL_ONLY,
noop: NO_BODY,
auth_negotiation: NO_BODY,
version: NO_BODY,
touch: TTL_AND_KEY,
gat: TTL_AND_KEY
}.freeze
FORMAT = BODY_FORMATS.transform_values { |v| REQ_HEADER_FORMAT + v }
# rubocop:disable Metrics/ParameterLists
def self.standard_request(opkey:, key: nil, value: nil, opaque: 0, cas: 0, bitflags: nil, ttl: nil)
extra_len = (bitflags.nil? ? 0 : 4) + (ttl.nil? ? 0 : 4)
key_len = key.nil? ? 0 : key.bytesize
value_len = value.nil? ? 0 : value.bytesize
header = [REQUEST, OPCODES[opkey], key_len, extra_len, 0, 0, extra_len + key_len + value_len, opaque, cas]
body = [bitflags, ttl, key, value].compact
(header + body).pack(FORMAT[opkey])
end
# rubocop:enable Metrics/ParameterLists
def self.decr_incr_request(opkey:, key: nil, count: nil, initial: nil, expiry: nil)
extra_len = 20
(h, l) = as_8byte_uint(count)
(dh, dl) = as_8byte_uint(initial)
header = [REQUEST, OPCODES[opkey], key.bytesize, extra_len, 0, 0, key.bytesize + extra_len, 0, 0]
body = [h, l, dh, dl, expiry, key]
(header + body).pack(FORMAT[opkey])
end
def self.as_8byte_uint(val)
[val >> 32, val & 0xFFFFFFFF]
end
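# Worked example: a 64-bit value is split into two 32-bit words to match the
# 'NN' pack directives used above.
#
#   as_8byte_uint(5)         # => [0, 5]
#   as_8byte_uint(2**32 + 5) # => [1, 5]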
end
end
end
end
dalli-3.2.8/lib/dalli/protocol/binary/response_header.rb
# frozen_string_literal: true
module Dalli
module Protocol
class Binary
##
# Class that encapsulates data parsed from a memcached response header.
##
class ResponseHeader
SIZE = 24
FMT = '@2nCCnNNQ'
attr_reader :key_len, :extra_len, :data_type, :status, :body_len, :opaque, :cas
def initialize(buf)
raise ArgumentError, "Response buffer must be at least #{SIZE} bytes" unless buf.bytesize >= SIZE
@key_len, @extra_len, @data_type, @status, @body_len, @opaque, @cas = buf.unpack(FMT)
end
def ok?
status.zero?
end
def not_found?
status == 1
end
NOT_STORED_STATUSES = [2, 5].freeze
def not_stored?
NOT_STORED_STATUSES.include?(status)
end
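# Illustrative sketch of parsing a header. The 24-byte buffer below is built
# with the request header layout, which the response header shares:
#
#   buf = [0x81, 0x00, 0, 0, 0, 1, 0, 0, 0].pack('CCnCCnNNQ')
#   header = ResponseHeader.new(buf)
#   header.not_found? # => true (status 1, "Key not found")
#   header.body_len   # => 0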
end
end
end
end
dalli-3.2.8/lib/dalli/protocol/binary/response_processor.rb
# frozen_string_literal: true
module Dalli
module Protocol
class Binary
##
# Class that encapsulates logic for processing binary protocol responses
# from memcached. Includes logic for pulling data from an IO source
# and parsing into local values. Handles errors on unexpected values.
##
class ResponseProcessor
# Response codes taken from:
# https://github.com/memcached/memcached/wiki/BinaryProtocolRevamped#response-status
RESPONSE_CODES = {
0 => 'No error',
1 => 'Key not found',
2 => 'Key exists',
3 => 'Value too large',
4 => 'Invalid arguments',
5 => 'Item not stored',
6 => 'Incr/decr on a non-numeric value',
7 => 'The vbucket belongs to another server',
8 => 'Authentication error',
9 => 'Authentication continue',
0x20 => 'Authentication required',
0x81 => 'Unknown command',
0x82 => 'Out of memory',
0x83 => 'Not supported',
0x84 => 'Internal error',
0x85 => 'Busy',
0x86 => 'Temporary failure'
}.freeze
def initialize(io_source, value_marshaller)
@io_source = io_source
@value_marshaller = value_marshaller
end
def read(num_bytes)
@io_source.read(num_bytes)
end
def read_response
resp_header = ResponseHeader.new(read_header)
body = read(resp_header.body_len) if resp_header.body_len.positive?
[resp_header, body]
end
def unpack_response_body(resp_header, body, parse_as_stored_value)
extra_len = resp_header.extra_len
key_len = resp_header.key_len
bitflags = extra_len.positive? ? body.unpack1('N') : 0x0
key = body.byteslice(extra_len, key_len).force_encoding(Encoding::UTF_8) if key_len.positive?
value = body.byteslice((extra_len + key_len)..-1)
value = @value_marshaller.retrieve(value, bitflags) if parse_as_stored_value
[key, value]
end
def read_header
read(ResponseHeader::SIZE) || raise(Dalli::NetworkError, 'No response')
end
def raise_on_not_ok!(resp_header)
return if resp_header.ok?
raise Dalli::DalliError, "Response error #{resp_header.status}: #{RESPONSE_CODES[resp_header.status]}"
end
def get(cache_nils: false)
resp_header, body = read_response
return false if resp_header.not_stored? # Not stored, normal status for add operation
return cache_nils ? ::Dalli::NOT_FOUND : nil if resp_header.not_found?
raise_on_not_ok!(resp_header)
return true unless body
unpack_response_body(resp_header, body, true).last
end
##
# Response for a storage operation. Returns the cas on success,
# false if the value wasn't stored, and raises an error on all
# other error codes from memcached.
##
def storage_response
resp_header, = read_response
return nil if resp_header.not_found?
return false if resp_header.not_stored? # Not stored, normal status for add operation
raise_on_not_ok!(resp_header)
resp_header.cas
end
def delete
resp_header, = read_response
return false if resp_header.not_found? || resp_header.not_stored?
raise_on_not_ok!(resp_header)
true
end
def data_cas_response
resp_header, body = read_response
return [nil, resp_header.cas] if resp_header.not_found?
return [nil, false] if resp_header.not_stored?
raise_on_not_ok!(resp_header)
return [nil, resp_header.cas] unless body
[unpack_response_body(resp_header, body, true).last, resp_header.cas]
end
# Returns the new value for the key, if found and updated
def decr_incr
body = generic_response
body ? body.unpack1('Q>') : body
end
def stats
hash = {}
loop do
resp_header, body = read_response
# This is the response to the terminating noop / end of stat
return hash if resp_header.ok? && resp_header.key_len.zero?
# Ignore any responses with non-zero status codes,
# such as errors from set operations. That allows
# this code to be used at the end of a multi
# block to clear any error responses from inside the multi.
next unless resp_header.ok?
key, value = unpack_response_body(resp_header, body, true)
hash[key] = value
end
end
def flush
no_body_response
end
def reset
generic_response
end
def version
generic_response
end
def consume_all_responses_until_noop
loop do
resp_header, = read_response
# This is the response to the terminating noop / end of stat
return true if resp_header.ok? && resp_header.key_len.zero?
end
end
def generic_response
resp_header, body = read_response
return false if resp_header.not_stored? # Not stored, normal status for add operation
return nil if resp_header.not_found?
raise_on_not_ok!(resp_header)
return true unless body
unpack_response_body(resp_header, body, false).last
end
def no_body_response
resp_header, = read_response
return false if resp_header.not_stored? # Not stored, possible status for append/prepend/delete
raise_on_not_ok!(resp_header)
true
end
def validate_auth_format(extra_len, count)
return if extra_len.zero?
raise Dalli::NetworkError, "Unexpected message format: #{extra_len} #{count}"
end
def auth_response(buf = read_header)
resp_header = ResponseHeader.new(buf)
body_len = resp_header.body_len
validate_auth_format(resp_header.extra_len, body_len)
content = read(body_len) if body_len.positive?
[resp_header.status, content]
end
def contains_header?(buf)
return false unless buf
buf.bytesize >= ResponseHeader::SIZE
end
def response_header_from_buffer(buf)
ResponseHeader.new(buf)
end
##
# This method returns an array of values used in a pipelined
# getk process. The first value is the number of bytes by
# which to advance the pointer in the buffer. If the
# complete response is found in the buffer, this will
# be the response size. Otherwise it is zero.
#
# The remaining three values in the array are the ResponseHeader,
# key, and value.
##
def getk_response_from_buffer(buf)
# There's no header in the buffer, so don't advance
return [0, nil, nil, nil, nil] unless contains_header?(buf)
resp_header = response_header_from_buffer(buf)
body_len = resp_header.body_len
# We have a complete response that has no body.
# This is either the response to the terminating
# noop or, if the status is not zero, an intermediate
# error response that needs to be discarded.
return [ResponseHeader::SIZE, resp_header.ok?, resp_header.cas, nil, nil] if body_len.zero?
resp_size = ResponseHeader::SIZE + body_len
# The header is in the buffer, but the body is not. As we don't have
# a complete response, don't advance the buffer
return [0, nil, nil, nil, nil] unless buf.bytesize >= resp_size
# The full response is in our buffer, so parse it and return
# the values
body = buf.byteslice(ResponseHeader::SIZE, body_len)
key, value = unpack_response_body(resp_header, body, true)
[resp_size, resp_header.ok?, resp_header.cas, key, value]
end
end
end
end
end
dalli-3.2.8/lib/dalli/protocol/binary/sasl_authentication.rb
# frozen_string_literal: true
module Dalli
module Protocol
class Binary
##
# Code to support SASL authentication
##
module SaslAuthentication
def perform_auth_negotiation
write(RequestFormatter.standard_request(opkey: :auth_negotiation))
status, content = response_processor.auth_response
return [status, []] if content.nil?
# Substitute spaces for the \x00 returned by
# memcached as a separator, for easier splitting
content&.tr!("\u0000", ' ')
mechanisms = content&.split
[status, mechanisms]
end
PLAIN_AUTH = 'PLAIN'
def supported_mechanisms!(mechanisms)
unless mechanisms.include?(PLAIN_AUTH)
raise NotImplementedError,
'Dalli only supports the PLAIN authentication mechanism'
end
[PLAIN_AUTH]
end
def authenticate_with_plain
write(RequestFormatter.standard_request(opkey: :auth_request,
key: PLAIN_AUTH,
value: "\x0#{username}\x0#{password}"))
@response_processor.auth_response
end
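# Illustrative sketch of the PLAIN payload built above (per RFC 4616): an
# empty authorization identity followed by NUL-separated username and
# password (credentials are examples only):
#
#   "\x00alice\x00s3cr3t"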
def authenticate_connection
Dalli.logger.info { "Dalli/SASL authenticating as #{username}" }
status, mechanisms = perform_auth_negotiation
return Dalli.logger.debug('Authentication not required/supported by server') if status == 0x81
supported_mechanisms!(mechanisms)
status, content = authenticate_with_plain
return Dalli.logger.info("Dalli/SASL: #{content}") if status.zero?
raise Dalli::DalliError, "Error authenticating: 0x#{status.to_s(16)}" unless status == 0x21
raise NotImplementedError, 'No two-step authentication mechanisms supported'
# (step, msg) = sasl.receive('challenge', content)
# raise Dalli::NetworkError, "Authentication failed" if sasl.failed? || step != 'response'
end
end
end
end
end
dalli-3.2.8/lib/dalli/protocol/connection_manager.rb
# frozen_string_literal: true
require 'English'
require 'socket'
require 'timeout'
require 'dalli/pid_cache'
module Dalli
module Protocol
##
# Manages the socket connection to the server, including ensuring liveness
# and retries.
##
class ConnectionManager
DEFAULTS = {
# seconds between trying to contact a remote server
down_retry_delay: 30,
# connect/read/write timeout for socket operations
socket_timeout: 1,
# times a socket operation may fail before considering the server dead
socket_max_failures: 2,
# amount of time to sleep between retries when a failure occurs
socket_failure_delay: 0.1,
# Set keepalive
keepalive: true
}.freeze
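# Illustrative sketch: any of these defaults can be overridden via the
# client options, since they are merged in #initialize below (values are
# examples only):
#
#   Dalli::Client.new('localhost:11211',
#                     socket_timeout: 0.5,
#                     down_retry_delay: 10,
#                     socket_max_failures: 3)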
attr_accessor :hostname, :port, :socket_type, :options
attr_reader :sock
def initialize(hostname, port, socket_type, client_options)
@hostname = hostname
@port = port
@socket_type = socket_type
@options = DEFAULTS.merge(client_options)
@request_in_progress = false
@sock = nil
@pid = nil
reset_down_info
end
def name
if socket_type == :unix
hostname
else
"#{hostname}:#{port}"
end
end
def establish_connection
Dalli.logger.debug { "Dalli::Server#connect #{name}" }
@sock = memcached_socket
@pid = PIDCache.pid
@request_in_progress = false
rescue SystemCallError, *TIMEOUT_ERRORS, EOFError, SocketError => e
# SocketError = DNS resolution failure
error_on_request!(e)
end
def reconnect_down_server?
return true unless @last_down_at
time_to_next_reconnect = @last_down_at + options[:down_retry_delay] - Time.now
return true unless time_to_next_reconnect.positive?
Dalli.logger.debug do
format('down_retry_delay not reached for %s (%