more than cache
David Schovanec
12.1.2017 Prague Ruby Meetup
2.
3.
# Simple key-value (string)
SET "some multiword key" "hello world"
GET "some multiword key" # => "hello world"
# Hash
HSET some_data name "David Schovanec"
HSET some_data username "schovi"
HGETALL some_data
# => 1) "name" 2) "David Schovanec" 3) "username" 4) "schovi"
# List
RPUSH supercalifragilisticexpialidocious "value" "another" "and one more"
LLEN supercalifragilisticexpialidocious # => 3
# Set
SADD meetup:speakers "David" "Jan Rezab"
SADD meetup:organisers "David" "Petra" "Honza"
SINTERSTORE meetup:speakers_and_organisers meetup:speakers meetup:organisers
SMEMBERS meetup:speakers_and_organisers # => 1) "David"
# Sorted Set
ZADD family_members 30 "David" 29 "Hanka" 0.9534246575 "Sára"
ZRANGE family_members 0 1 WITHSCORES
# => 1) "Sára" 2) "0.95342465750000005" 3) "Hanka" 4) "29"
4.
INCR user:1232:clicks #=> 1, then 2, etc...
GETSET user:1232:clicks 0 #=> returns the last value and sets the new one
# Set value with expiration
SETEX cache:post:1762:html 3600 "<div><h1>Title</h1><p>content...</p></div>"
GETRANGE cache:post:1762:html 0 7 # => "<div><h1"
# Non-blocking delete
UNLINK cache:post:1762:html
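The same counter and cache pattern via the redis gem; a sketch (UNLINK needs Redis 4.0+ and a reasonably recent redis-rb):
# Ruby
require "redis"
redis = Redis.new
redis.incr("user:1232:clicks")                 # => 1, then 2, ...
redis.getset("user:1232:clicks", 0)            # => last value as a string, counter reset to 0
redis.setex("cache:post:1762:html", 3600, "<div>...</div>")  # cache for one hour
redis.unlink("cache:post:1762:html")           # non-blocking delete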
## Permission handling with BIT operations
SET access:company:123:editor "\x10\x00\xf0"
SET access:company:123:graphic "\x82\x00\x00"
BITOP OR access:user:1001 access:company:123:editor access:company:123:graphic
GET access:user:1001
# => "\x92\x00\xf0"
## GEO
GEOADD places 14.3255421 50.0598058 "Praha" 16.5079212 49.2021611 "Brno"
GEODIST places "Praha" "Brno" km # => 183.8881
GEOADD places 15.5105797 49.4045045 "Jihlava"
GEORADIUSBYMEMBER places "Jihlava" 200 km WITHDIST
# =>
1) 1) "Praha" 2) "112.1166"
2) 1) "Jihlava" 2) "0.0000"
3) 1) "Brno" 2) "75.7526"
5.
# Create meetup
HMSET meetup:13 id 13 name "Prague Ruby Meetup" attendees 20
SADD meetups 13
# Create user
HMSET user:1001 id 1001 name "David Schovanec" username "Schovi" age 30
SADD users 1001
# User attends meetup
SADD meetup:13:attendees 1001
# User organises meetup
SADD meetup:13:organisers 1001
# (Pseudocode: in redis-cli you can't assign variables; see the Ruby version below)
attendee_ids = SMEMBERS meetup:13:attendees
for (id in attendee_ids) {
  HGETALL "user:" + id
  # => 1) "id" 2) "1001" 3) "name" 4) "David Schovanec" 5) "username" 6) "Schovi" 7) "age" 8) "30"
}
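The same lookup with the redis gem, since Ruby can hold the intermediate result (a sketch; key names as above):
# Ruby
require "redis"
redis = Redis.new
attendee_ids = redis.smembers("meetup:13:attendees")   # => ["1001"]
attendees = attendee_ids.map { |id| redis.hgetall("user:#{id}") }
# => [{"id"=>"1001", "name"=>"David Schovanec", "username"=>"Schovi", "age"=>"30"}]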
6.
## Basic Queue
RPUSH queue "Job to be done"
RPUSH queue "Job to be done later"
LPOP queue # => "Job to be done"
## Priority queue
RPUSH queue:high "Urgent job"
RPUSH queue:low "Boring job"
BLPOP queue:high queue:low 2 # => 1) "queue:high" 2) "Urgent job"
BLPOP queue:high queue:low 2 # => 1) "queue:low" 2) "Boring job"
# The next call blocks for up to 2 seconds, waiting for another client to push to one of the lists
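A Ruby worker can block on both queues with a single call; a sketch using the redis gem (timeout keyword as in current redis-rb):
# Ruby
require "redis"
redis = Redis.new
# Returns [list_name, element], or nil when the 2 second timeout expires
queue, job = redis.blpop("queue:high", "queue:low", timeout: 2)
# => ["queue:high", "Urgent job"]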
## Reliable queue
LPUSH queue:pending "Job to be done"
RPOPLPUSH queue:pending queue:processing # => "Job to be done"
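A sketch of a reliable worker loop on top of RPOPLPUSH; the LREM acknowledgement step is an assumption of this sketch, not shown on the slide:
# Ruby
require "redis"
redis = Redis.new
while (job = redis.rpoplpush("queue:pending", "queue:processing"))
  puts "working on #{job}"                 # real work would happen here
  redis.lrem("queue:processing", 1, job)   # acknowledge: drop the finished job
end
# Jobs left in queue:processing after a crash can be pushed back to queue:pending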
## Unique jobs with order
SET job:post:768:process_at 1484080658
SET job:post:6757:process_at 1484080687
SET job:post:768:data "#Markdown **bold**"
SET job:post:6757:data "##Title 2"
SADD unique:pending post:768 post:6757
SORT unique:pending BY "job:*:process_at" ASC GET "#" GET "job:*:data" LIMIT 0 1
# => 1) "post:768" 2) "#Markdown **bold**"
# Can be reliable too
SORT unique:pending BY ... STORE some_key
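The SORT call maps onto the redis gem's keyword options; a sketch:
# Ruby
require "redis"
redis = Redis.new
redis.sort("unique:pending",
           by:    "job:*:process_at",
           order: "ASC",
           get:   ["#", "job:*:data"],
           limit: [0, 1])
# => [["post:768", "#Markdown **bold**"]]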
7.
## Basic transactions
MULTI
SREM queue job:786 # => QUEUED
SREM queue:processing job:786 # => QUEUED
DEL job:786 # => QUEUED
# Execute transaction. Calls queued commands
EXEC
# => 1) 1 2) 1 3) 1
## Safe transaction
WATCH queue
MULTI
SREM queue job:786 # => QUEUED
DEL job:786 # => QUEUED
## Meanwhile another client runs
SADD queue job:797
## Back in our client
EXEC
# => (nil)
# nil means the transaction was aborted because a watched key changed
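In Ruby the same check-and-set pattern looks like this (a sketch; this block form of watch/multi is the one documented for redis-rb 4.x):
# Ruby
require "redis"
redis = Redis.new
redis.watch("queue") do
  result = redis.multi do |tx|
    tx.srem("queue", "job:786")
    tx.del("job:786")
  end
  # result is nil when another client changed "queue" before EXEC
end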
8.
## Client 1
SUBSCRIBE my_channel
# Reading messages... (press Ctrl-C to quit)
# 1) "subscribe" 2) "my_channel" 3) (integer) 1
## Client 2 sends a message
PUBLISH my_channel "Hello world" # => 1 (count of subscribers. 0 for none)
## Client 1 receives
1) "message"
2) "my_channel"
3) "Hello world"
## Pattern match subscribing
PSUBSCRIBE socket:chat:* socket:news_feed:*
1) "psubscribe" 2) "socket:chat:*" 3) (integer) 1 1) "psubscribe" 2) "socket:news_feed:*" 3) (integer) 2
# PUBLISH socket:chat:1 "Good afternoon" # => 1
1) "pmessage"
2) "socket:chat:*"
3) "socket:chat:1"
4) "Good afternoon"
9.
10.
# Ruby
# Custom ZLPOPZADD with MULTI command
redis.multi do
  job, score = redis.zrange("queue:pending", 0, 0, with_scores: true)
  redis.zadd("queue:processing", score, job)
  redis.zrem("queue:pending", job)
end
# Doesn't work: inside MULTI the commands are only queued and executed at EXEC,
# so job and score never hold the real values read from Redis
# Ruby
# Custom ZLPOPZADD with sequence of commands
job, score = redis.zrange("queue:pending", 0, 0, with_scores: true).first
redis.zadd("queue:processing", score, job)
redis.zrem("queue:pending", job)
# Works, but another client can change the data between the commands
# and you won't be able to tell what happened
11.
-- Script arguments come in two kinds: KEYS and ARGV
local fromKey = KEYS[1]
local toKey = KEYS[2]
local toMove = redis.call('ZRANGE', fromKey, 0, 0, 'WITHSCORES')
-- If the result is non-empty, move the member to the other sorted set
if #toMove > 0 then
  local member = toMove[1]
  local score = toMove[2]
  redis.call('ZREM', fromKey, member)
  redis.call('ZADD', toKey, score, member)
  return member
end
EVAL script_string 2 queue:pending queue:processing # => nil | popped value
# Don't send the whole script over the network again and again
SCRIPT LOAD script_string # => Script sha1
EVALSHA script_sha1 2 queue:pending queue:processing "argument if needed"
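Calling the script from Ruby; a sketch (the zlpopzadd.lua file name follows the redis-cli example below):
# Ruby
require "redis"
redis = Redis.new
script = File.read("zlpopzadd.lua")
redis.eval(script, keys: ["queue:pending", "queue:processing"])
# Cache the script on the server and call it by sha1 afterwards
sha = redis.script(:load, script)
redis.evalsha(sha, keys: ["queue:pending", "queue:processing"])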
# Debugging in terminal
redis-cli --ldb --eval ./zlpopzadd.lua pending processing , "other arg"
# Enable debugging directly in redis
SCRIPT DEBUG YES|SYNC|NO
12.
13.
Snapshotting
# redis.conf
dbfilename dump.rdb
save 900 1 # save after 15 minutes if 1 key changed
save 300 10 # save after 5 minutes if 10 keys changed
save 60 10000 # save after 1 minute if 10000 keys changed
Append-only file
# redis.conf
appendonly yes
appendfilename appendonly.aof
# don't fsync, just let the OS flush the data when it wants. Faster.
appendfsync no
# fsync after every write to the append only log. Slow, Safest
appendfsync always
# fsync only one time every second. Compromise.
appendfsync everysec
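Persistence settings can also be inspected or changed at runtime from Ruby; a sketch (CONFIG SET changes are not written back to redis.conf):
# Ruby
require "redis"
redis = Redis.new
redis.config(:get, "appendfsync")             # => {"appendfsync"=>"everysec"}
redis.config(:set, "appendfsync", "always")   # => "OK"
redis.bgsave                                  # trigger an RDB snapshot in the background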
14.
15.
Question time with rewards!