- Nginx built-in module (ngx_http_limit_req_module)

Example below. Rate limiting is supported at different granularities: `$binary_remote_addr` limits each individual client IP, while `$server_name` limits the whole server (service).
```nginx
limit_req_zone $binary_remote_addr zone=perip:10m rate=1r/s;
limit_req_zone $server_name zone=perserver:10m rate=10r/s;

server {
    ...
    limit_req zone=perip burst=5 nodelay;
    limit_req zone=perserver burst=10;
}
```
- Based on the OpenResty lua-resty-limit-traffic module

resty.limit.req limits the request rate, based on the leaky bucket algorithm.
```nginx
# demonstrate the usage of the resty.limit.req module (alone!)
http {
    lua_shared_dict my_limit_req_store 100m;

    server {
        location / {
            access_by_lua_block {
                -- well, we could put the require() and new() calls in our own Lua
                -- modules to save overhead. here we put them below just for
                -- convenience.
                local limit_req = require "resty.limit.req"

                -- limit the requests under 200 req/sec with a burst of 100 req/sec,
                -- that is, we delay requests under 300 req/sec and above 200
                -- req/sec, and reject any requests exceeding 300 req/sec.
                local lim, err = limit_req.new("my_limit_req_store", 200, 100)
                if not lim then
                    ngx.log(ngx.ERR,
                            "failed to instantiate a resty.limit.req object: ", err)
                    return ngx.exit(500)
                end

                -- the following call must be per-request.
                -- here we use the remote (IP) address as the limiting key
                local key = ngx.var.binary_remote_addr
                local delay, err = lim:incoming(key, true)
                if not delay then
                    if err == "rejected" then
                        return ngx.exit(503)
                    end
                    ngx.log(ngx.ERR, "failed to limit req: ", err)
                    return ngx.exit(500)
                end

                if delay >= 0.001 then
                    -- the 2nd return value holds the number of excess requests
                    -- per second for the specified key. for example, number 31
                    -- means the current request rate is at 231 req/sec for the
                    -- specified key.
                    local excess = err

                    -- the request exceeding the 200 req/sec but below 300 req/sec,
                    -- so we intentionally delay it here a bit to conform to the
                    -- 200 req/sec rate.
                    ngx.sleep(delay)
                end
            }

            # content handler goes here. if it is content_by_lua, then you can
            # merge the Lua code above in access_by_lua into your content_by_lua's
            # Lua handler to save a little bit of CPU time.
        }
    }
}
```
resty.limit.conn limits the current number of concurrent connections (requests).
```nginx
# demonstrate the usage of the resty.limit.conn module (alone!)
http {
    lua_shared_dict my_limit_conn_store 100m;

    server {
        location / {
            access_by_lua_block {
                -- well, we could put the require() and new() calls in our own Lua
                -- modules to save overhead. here we put them below just for
                -- convenience.
                local limit_conn = require "resty.limit.conn"

                -- limit the requests under 200 concurrent requests (normally just
                -- incoming connections unless protocols like SPDY is used) with
                -- a burst of 100 extra concurrent requests, that is, we delay
                -- requests under 300 concurrent connections and above 200
                -- connections, and reject any new requests exceeding 300
                -- connections.
                -- also, we assume a default request time of 0.5 sec, which can be
                -- dynamically adjusted by the leaving() call in log_by_lua below.
                local lim, err = limit_conn.new("my_limit_conn_store", 200, 100, 0.5)
                if not lim then
                    ngx.log(ngx.ERR,
                            "failed to instantiate a resty.limit.conn object: ", err)
                    return ngx.exit(500)
                end

                -- the following call must be per-request.
                -- here we use the remote (IP) address as the limiting key
                local key = ngx.var.binary_remote_addr
                local delay, err = lim:incoming(key, true)
                if not delay then
                    if err == "rejected" then
                        return ngx.exit(503)
                    end
                    ngx.log(ngx.ERR, "failed to limit req: ", err)
                    return ngx.exit(500)
                end

                if lim:is_committed() then
                    local ctx = ngx.ctx
                    ctx.limit_conn = lim
                    ctx.limit_conn_key = key
                    ctx.limit_conn_delay = delay
                end

                -- the 2nd return value holds the current concurrency level
                -- for the specified key.
                local conn = err

                if delay >= 0.001 then
                    -- the request exceeding the 200 connections ratio but below
                    -- 300 connections, so
                    -- we intentionally delay it here a bit to conform to the
                    -- 200 connection limit.
                    -- ngx.log(ngx.WARN, "delaying")
                    ngx.sleep(delay)
                end
            }

            # content handler goes here. if it is content_by_lua, then you can
            # merge the Lua code above in access_by_lua into your
            # content_by_lua's Lua handler to save a little bit of CPU time.

            log_by_lua_block {
                local ctx = ngx.ctx
                local lim = ctx.limit_conn
                if lim then
                    -- if you are using an upstream module in the content phase,
                    -- then you probably want to use $upstream_response_time
                    -- instead of ($request_time - ctx.limit_conn_delay) below.
                    local latency = tonumber(ngx.var.request_time) - ctx.limit_conn_delay
                    local key = ctx.limit_conn_key
                    assert(key)
                    local conn, err = lim:leaving(key, latency)
                    if not conn then
                        ngx.log(ngx.ERR,
                                "failed to record the connection leaving ",
                                "request: ", err)
                        return
                    end
                end
            }
        }
    }
}
```
resty.limit.count rate-limits individual users over a fixed window, e.g. how many requests a user may make within a given time period; see GitHub's API rate limiting for reference.
```nginx
http {
    lua_shared_dict my_limit_count_store 100m;

    init_by_lua_block {
        require "resty.core"
    }

    server {
        location / {
            access_by_lua_block {
                local limit_count = require "resty.limit.count"

                -- rate: 5000 requests per 3600s
                local lim, err = limit_count.new("my_limit_count_store", 5000, 3600)
                if not lim then
                    ngx.log(ngx.ERR,
                            "failed to instantiate a resty.limit.count object: ", err)
                    return ngx.exit(500)
                end

                -- use the Authorization header as the limiting key
                local key = ngx.req.get_headers()["Authorization"] or "public"
                local delay, err = lim:incoming(key, true)

                if not delay then
                    if err == "rejected" then
                        ngx.header["X-RateLimit-Limit"] = "5000"
                        ngx.header["X-RateLimit-Remaining"] = 0
                        return ngx.exit(503)
                    end
                    ngx.log(ngx.ERR, "failed to limit count: ", err)
                    return ngx.exit(500)
                end

                -- the 2nd return value holds the current remaining number
                -- of requests for the specified key.
                local remaining = err

                ngx.header["X-RateLimit-Limit"] = "5000"
                ngx.header["X-RateLimit-Remaining"] = remaining
            }
        }
    }
}
```
resty.limit.traffic is used to combine resty.limit.req, resty.limit.conn, and resty.limit.count, as shown in the sketch below.
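A minimal sketch of combining several limiters with `resty.limit.traffic.combine()`, in the same style as the examples above. The shared dict names and the rates used here are illustrative assumptions, not values from the original post, and error handling for `new()` is omitted for brevity:

```lua
access_by_lua_block {
    local limit_req     = require "resty.limit.req"
    local limit_conn    = require "resty.limit.conn"
    local limit_traffic = require "resty.limit.traffic"

    -- a per-host request limiter, a per-client request limiter and a
    -- per-client concurrency limiter (rates are placeholders)
    local lim1 = limit_req.new("my_limit_req_store", 300, 200)
    local lim2 = limit_req.new("my_limit_req_store", 200, 100)
    local lim3 = limit_conn.new("my_limit_conn_store", 1000, 1000, 0.5)

    local limiters = { lim1, lim2, lim3 }
    local host     = ngx.var.host
    local client   = ngx.var.binary_remote_addr
    local keys     = { host, client, client }
    local states   = {}

    -- combine() applies all the limiters for their respective keys and
    -- returns the largest delay required (or nil + "rejected")
    local delay, err = limit_traffic.combine(limiters, keys, states)
    if not delay then
        if err == "rejected" then
            return ngx.exit(503)
        end
        ngx.log(ngx.ERR, "failed to limit traffic: ", err)
        return ngx.exit(500)
    end

    -- remember the conn limiter so a log_by_lua_block can call
    -- lim3:leaving(), as in the resty.limit.conn example above
    if lim3:is_committed() then
        ngx.ctx.limit_conn = lim3
        ngx.ctx.limit_conn_key = keys[3]
    end

    if delay >= 0.001 then
        ngx.sleep(delay)
    end
}
```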
PS: Rate limiting with OpenResty + Redis

The code below is my own practice code. Depending on the scenario, different rules can be used to build the Redis key (the time-based case is shown below; IP- and user-based key variants follow it):

- Rate limit by time window
- Rate limit by IP (make sure the upstream/front proxy passes through the real client IP)
- Rate limit by user info (the request must carry a user identifier)
```lua
-- time-based rate limiting: one counter per second, reject above 500 req/s
local redis = require "resty.redis"
local red = redis:new()
red:set_timeout(1000)  -- 1s connect/send/read timeout

local ok, err = red:connect("127.0.0.1", 6379)
if not ok then
    ngx.log(ngx.ERR, "failed to connect to redis: ", err)
    return ngx.exit(ngx.HTTP_INTERNAL_SERVER_ERROR)
end

local key = ngx.time()          -- current second as the Redis key
local res, err = red:incr(key)
if not res then
    ngx.log(ngx.ERR, "failed to incr: ", err)
    return ngx.exit(ngx.HTTP_INTERNAL_SERVER_ERROR)
end
if res == 1 then
    red:expire(key, 2)          -- let the per-second counter expire quickly
end

red:set_keepalive(10000, 100)   -- return the connection to the pool

if res > 500 then
    return ngx.exit(ngx.HTTP_SERVICE_UNAVAILABLE)
end
```
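To switch from time-based limiting to the IP- or user-based rules listed above, only the key construction needs to change; everything else stays the same. A small sketch, where the header names (`X-Real-IP`, `X-User-Id`) are assumptions that depend on what your front proxy and clients actually send:

```lua
-- per-IP key: real client IP, assuming the front proxy passes it through
local ip  = ngx.var.http_x_real_ip or ngx.var.remote_addr
local key = "limit:ip:" .. ip .. ":" .. ngx.time()

-- per-user key: the request must carry a user identifier, e.g. a header
-- local uid = ngx.req.get_headers()["X-User-Id"] or "anonymous"
-- local key = "limit:user:" .. uid .. ":" .. ngx.time()
```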