diff --git a/.travis.yml b/.travis.yml
index f655d80ae8ff..fcdd5b7aae6a 100644
--- a/.travis.yml
+++ b/.travis.yml
@@ -15,6 +15,7 @@ before_install:
   - bash .travis/setup_cassandra.sh

 install:
+  - sudo make install
   - sudo make dev
   - sudo sed -i.bak s@/usr/local/bin/luajit@/usr/bin/lua@g /usr/local/bin/busted

diff --git a/kong-0.3.0-1.rockspec b/kong-0.3.0-1.rockspec
index 42121ce8844e..bb1bb9cbf26e 100644
--- a/kong-0.3.0-1.rockspec
+++ b/kong-0.3.0-1.rockspec
@@ -98,9 +98,6 @@ build = {
     ["kong.plugins.keyauth.schema"] = "kong/plugins/keyauth/schema.lua",
     ["kong.plugins.keyauth.api"] = "kong/plugins/keyauth/api.lua",

-    ["kong.plugins.log_serializers.basic"] = "kong/plugins/log_serializers/basic.lua",
-    ["kong.plugins.log_serializers.alf"] = "kong/plugins/log_serializers/alf.lua",
-
     ["kong.plugins.tcplog.handler"] = "kong/plugins/tcplog/handler.lua",
     ["kong.plugins.tcplog.log"] = "kong/plugins/tcplog/log.lua",
     ["kong.plugins.tcplog.schema"] = "kong/plugins/tcplog/schema.lua",
@@ -114,12 +111,10 @@ build = {
     ["kong.plugins.httplog.schema"] = "kong/plugins/httplog/schema.lua",

     ["kong.plugins.filelog.handler"] = "kong/plugins/filelog/handler.lua",
+    ["kong.plugins.filelog.log"] = "kong/plugins/filelog/log.lua",
     ["kong.plugins.filelog.schema"] = "kong/plugins/filelog/schema.lua",
     ["kong.plugins.filelog.fd_util"] = "kong/plugins/filelog/fd_util.lua",

-    ["kong.plugins.analytics.handler"] = "kong/plugins/analytics/handler.lua",
-    ["kong.plugins.analytics.schema"] = "kong/plugins/analytics/schema.lua",
-
     ["kong.plugins.ratelimiting.handler"] = "kong/plugins/ratelimiting/handler.lua",
     ["kong.plugins.ratelimiting.access"] = "kong/plugins/ratelimiting/access.lua",
     ["kong.plugins.ratelimiting.schema"] = "kong/plugins/ratelimiting/schema.lua",
diff --git a/kong.yml b/kong.yml
index a47969fbc76f..2af0d253ca9c 100644
--- a/kong.yml
+++ b/kong.yml
@@ -12,7 +12,6 @@ plugins_available:
   - request_transformer
   - response_transformer
   - requestsizelimiting
-  - analytics

 ## The Kong working directory
 ## (Make sure you have read and write permissions)
diff --git a/kong/kong.lua b/kong/kong.lua
index 39a6ad067b56..3730e0d04d5c 100644
--- a/kong/kong.lua
+++ b/kong/kong.lua
@@ -30,6 +30,7 @@ local cache = require "kong.tools.database_cache"
 local stringy = require "stringy"
 local constants = require "kong.constants"
 local responses = require "kong.tools.responses"
+local timestamp = require "kong.tools.timestamp"

 -- Define the plugins to load here, in the appropriate order
 local plugins = {}
@@ -166,10 +167,10 @@ function _M.exec_plugins_certificate()
   return
 end

--- Calls plugins' access() on every loaded plugin
+-- Calls plugins_access() on every loaded plugin
 function _M.exec_plugins_access()
   -- Setting a property that will be available for every plugin
-  ngx.ctx.started_at = ngx.req.start_time()
+  ngx.ctx.started_at = timestamp.get_utc()
   ngx.ctx.plugin_conf = {}

   -- Iterate over all the plugins
@@ -185,9 +186,9 @@ function _M.exec_plugins_access()
       end
     end

-    local plugin_conf = ngx.ctx.plugin_conf[plugin.name]
-    if not ngx.ctx.stop_phases and (plugin.resolver or plugin_conf) then
-      plugin.handler:access(plugin_conf and plugin_conf.value or nil)
+    local conf = ngx.ctx.plugin_conf[plugin.name]
+    if not ngx.ctx.stop_phases and (plugin.resolver or conf) then
+      plugin.handler:access(conf and conf.value or nil)
     end
   end

@@ -199,20 +200,20 @@ function _M.exec_plugins_access()
   end

   ngx.var.backend_url = final_url
-  ngx.ctx.proxy_started_at = ngx.now() -- Setting a property that will be available for every plugin
+  ngx.ctx.proxy_started_at = timestamp.get_utc() -- Setting a property that will be available for every plugin
 end

 -- Calls header_filter() on every loaded plugin
 function _M.exec_plugins_header_filter()
-  ngx.ctx.proxy_ended_at = ngx.now() -- Setting a property that will be available for every plugin
+  ngx.ctx.proxy_ended_at = timestamp.get_utc() -- Setting a property that will be available for every plugin

   if not ngx.ctx.stop_phases then
     ngx.header["Via"] = constants.NAME.."/"..constants.VERSION

     for _, plugin in ipairs(plugins) do
-      local plugin_conf = ngx.ctx.plugin_conf[plugin.name]
-      if plugin_conf then
-        plugin.handler:header_filter(plugin_conf.value)
+      local conf = ngx.ctx.plugin_conf[plugin.name]
+      if conf then
+        plugin.handler:header_filter(conf.value)
       end
     end
   end
@@ -222,9 +223,9 @@ end
 function _M.exec_plugins_body_filter()
   if not ngx.ctx.stop_phases then
     for _, plugin in ipairs(plugins) do
-      local plugin_conf = ngx.ctx.plugin_conf[plugin.name]
-      if plugin_conf then
-        plugin.handler:body_filter(plugin_conf.value)
+      local conf = ngx.ctx.plugin_conf[plugin.name]
+      if conf then
+        plugin.handler:body_filter(conf.value)
       end
     end
   end
@@ -233,10 +234,33 @@ end
 -- Calls log() on every loaded plugin
 function _M.exec_plugins_log()
   if not ngx.ctx.stop_phases then
+    -- Creating the log variable that will be serialized
+    local message = {
+      request = {
+        uri = ngx.var.request_uri,
+        request_uri = ngx.var.scheme.."://"..ngx.var.host..":"..ngx.var.server_port..ngx.var.request_uri,
+        querystring = ngx.req.get_uri_args(), -- parameters, as a table
+        method = ngx.req.get_method(), -- http method
+        headers = ngx.req.get_headers(),
+        size = ngx.var.request_length
+      },
+      response = {
+        status = ngx.status,
+        headers = ngx.resp.get_headers(),
+        size = ngx.var.bytes_sent
+      },
+      authenticated_entity = ngx.ctx.authenticated_entity,
+      api = ngx.ctx.api,
+      client_ip = ngx.var.remote_addr,
+      started_at = ngx.req.start_time() * 1000
+    }
+
+    ngx.ctx.log_message = message
+
     for _, plugin in ipairs(plugins) do
-      local plugin_conf = ngx.ctx.plugin_conf[plugin.name]
-      if plugin_conf then
-        plugin.handler:log(plugin_conf.value)
+      local conf = ngx.ctx.plugin_conf[plugin.name]
+      if conf then
+        plugin.handler:log(conf.value)
       end
     end
   end
diff --git a/kong/plugins/analytics/handler.lua b/kong/plugins/analytics/handler.lua
deleted file mode 100644
index 4237867926d4..000000000000
--- a/kong/plugins/analytics/handler.lua
+++ /dev/null
@@ -1,163 +0,0 @@
--- Analytics plugin handler.
---
--- How it works:
--- Keep track of calls made to configured APIs on a per-worker basis, using the ALF format
--- (alf_serializer.lua). `:access()` and `:body_filter()` are implemented to record some properties
--- required for the ALF entry.
---
--- When the buffer is full (it reaches the `batch_size` configuration value), send the batch to the server.
--- If the server doesn't accept it, don't flush the data and it'll try again at the next call.
--- If the server accepted the batch, flush the buffer.
---
--- In order to keep Analytics as real-time as possible, we also start a 'delayed timer' running in background.
--- If no requests are made during a certain period of time (the `delay` configuration value), the
--- delayed timer will fire and send the batch + flush the data, not waiting for the buffer to be full.
-
-local http = require "resty_http"
-local BasePlugin = require "kong.plugins.base_plugin"
-local ALFSerializer = require "kong.plugins.log_serializers.alf"
-
-local ALF_BUFFER = {}
-local DELAYED_LOCK = false
-local LATEST_CALL
-
-local ANALYTICS_SOCKET = {
-  host = "localhost", -- socket.analytics.mashape.com
-  port = 58000,
-  path = "/alf_1.0.0"
-}
-
-local function send_batch(premature, conf, alf)
-  -- Abort the sending if the entries are empty, maybe it was triggered from the delayed
-  -- timer, but already sent because we reached the limit in a request later.
-  if table.getn(alf.har.log.entries) < 1 then
-    return
-  end
-
-  local message = alf:to_json_string(conf.service_token)
-
-  local ok, err
-  local client = http:new()
-  client:set_timeout(50000) -- 5 sec
-
-  ok, err = client:connect(ANALYTICS_SOCKET.host, ANALYTICS_SOCKET.port)
-  if not ok then
-    ngx.log(ngx.ERR, "[analytics] failed to connect to the socket: "..err)
-    return
-  end
-
-  local res, err = client:request({ path = ANALYTICS_SOCKET.path, body = message })
-  if not res then
-    ngx.log(ngx.ERR, "[analytics] failed to send batch: "..err)
-  end
-
-  -- close connection, or put it into the connection pool
-  if res.headers["connection"] == "close" then
-    ok, err = client:close()
-    if not ok then
-      ngx.log(ngx.ERR, "[analytics] failed to close: "..err)
-    end
-  else
-    client:set_keepalive()
-  end
-
-  if res.status == 200 then
-    alf:flush_entries()
-    ngx.log(ngx.DEBUG, "[analytics] successfully saved the batch")
-  else
-    ngx.log(ngx.ERR, "[analytics] socket refused the batch: "..res.body)
-  end
-end
-
--- A handler for delayed batch sending. When no call have been made for X seconds
--- (X being conf.delay), we send the batch to keep analytics as close to real-time
--- as possible.
-local delayed_send_handler
-delayed_send_handler = function(premature, conf, alf)
-  -- If the latest call was received during the wait delay, abort the delayed send and
-  -- report it for X more seconds.
-  if ngx.now() - LATEST_CALL < conf.delay then
-    local ok, err = ngx.timer.at(conf.delay, delayed_send_handler, conf, alf)
-    if not ok then
-      ngx.log(ngx.ERR, "[analytics] failed to create delayed batch sending timer: ", err)
-    end
-  else
-    DELAYED_LOCK = false -- re-enable creation of a delayed-timer
-    send_batch(premature, conf, alf)
-  end
-end
-
---
---
---
-
-local AnalyticsHandler = BasePlugin:extend()
-
-function AnalyticsHandler:new()
-  AnalyticsHandler.super.new(self, "analytics")
-end
-
-function AnalyticsHandler:access(conf)
-  AnalyticsHandler.super.access(self)
-
-  -- Retrieve and keep in memory the bodies for this request
-  ngx.ctx.analytics = {
-    req_body = "",
-    res_body = ""
-  }
-
-  if conf.log_body then
-    ngx.req.read_body()
-    ngx.ctx.analytics.req_body = ngx.req.get_body_data()
-  end
-end
-
-function AnalyticsHandler:body_filter(conf)
-  AnalyticsHandler.super.body_filter(self)
-
-  local chunk, eof = ngx.arg[1], ngx.arg[2]
-  -- concatenate response chunks for ALF's `response.content.text`
-  if conf.log_body then
-    ngx.ctx.analytics.res_body = ngx.ctx.analytics.res_body..chunk
-  end
-
-  if eof then -- latest chunk
-    ngx.ctx.analytics.response_received = ngx.now()
-  end
-end
-
-function AnalyticsHandler:log(conf)
-  AnalyticsHandler.super.log(self)
-
-  local api_id = ngx.ctx.api.id
-
-  -- Create the ALF if not existing for this API
-  if not ALF_BUFFER[api_id] then
-    ALF_BUFFER[api_id] = ALFSerializer:new_alf()
-  end
-
-  -- Simply adding the entry to the ALF
-  local n_entries = ALF_BUFFER[api_id]:add_entry(ngx)
-
-  -- Keep track of the latest call for the delayed timer
-  LATEST_CALL = ngx.now()
-
-  if n_entries >= conf.batch_size then
-    -- Batch size reached, let's send the data
-    local ok, err = ngx.timer.at(0, send_batch, conf, ALF_BUFFER[api_id])
-    if not ok then
-      ngx.log(ngx.ERR, "[analytics] failed to create batch sending timer: ", err)
-    end
-  elseif not DELAYED_LOCK then
-    DELAYED_LOCK = true -- Make sure only one delayed timer is ever pending
-    -- Batch size not yet reached.
-    -- Set a timer sending the data only in case nothing happens for awhile or if the batch_size is taking
-    -- too much time to reach the limit and trigger the flush.
-    local ok, err = ngx.timer.at(conf.delay, delayed_send_handler, conf, ALF_BUFFER[api_id])
-    if not ok then
-      ngx.log(ngx.ERR, "[analytics] failed to create delayed batch sending timer: ", err)
-    end
-  end
-end
-
-return AnalyticsHandler
diff --git a/kong/plugins/analytics/schema.lua b/kong/plugins/analytics/schema.lua
deleted file mode 100644
index 444c12cc6d98..000000000000
--- a/kong/plugins/analytics/schema.lua
+++ /dev/null
@@ -1,6 +0,0 @@
-return {
-  service_token = { type = "string", required = true },
-  batch_size = { type = "number", default = 100 },
-  log_body = { type = "boolean", default = false },
-  delay = { type = "number", default = 10 }
-}
diff --git a/kong/plugins/filelog/handler.lua b/kong/plugins/filelog/handler.lua
index 4a950e5ef41c..70ce61cd6d98 100644
--- a/kong/plugins/filelog/handler.lua
+++ b/kong/plugins/filelog/handler.lua
@@ -1,5 +1,7 @@
-local log = require "kong.plugins.filelog.log"
+-- Copyright (C) Mashape, Inc.
+
 local BasePlugin = require "kong.plugins.base_plugin"
+local log = require "kong.plugins.filelog.log"

 local FileLogHandler = BasePlugin:extend()

diff --git a/kong/plugins/filelog/log.lua b/kong/plugins/filelog/log.lua
index 577abfa36ffe..55464c580cb9 100644
--- a/kong/plugins/filelog/log.lua
+++ b/kong/plugins/filelog/log.lua
@@ -1,9 +1,7 @@
 -- Copyright (C) Mashape, Inc.
-
-local ffi = require "ffi"
 local cjson = require "cjson"
+local ffi = require "ffi"
 local fd_util = require "kong.plugins.filelog.fd_util"
-local basic_serializer = require "kong.plugins.log_serializers.basic"

 ffi.cdef[[
 typedef struct {
@@ -26,7 +24,7 @@ int fprintf(FILE *stream, const char *format, ...);
 -- @param `conf` Configuration table, holds http endpoint details
 -- @param `message` Message to be logged
 local function log(premature, conf, message)
-  message = cjson.encode(message).."\n"
+  local message = cjson.encode(message).."\n"

   local f = fd_util.get_fd()
   if not f then
@@ -41,9 +39,7 @@ end
 local _M = {}

 function _M.execute(conf)
-  local message = basic_serializer.serialize(ngx)
-
-  local ok, err = ngx.timer.at(0, log, conf, message)
+  local ok, err = ngx.timer.at(0, log, conf, ngx.ctx.log_message)
   if not ok then
     ngx.log(ngx.ERR, "[filelog] failed to create timer: ", err)
   end
diff --git a/kong/plugins/httplog/handler.lua b/kong/plugins/httplog/handler.lua
index 4871349c827d..5d68c0dab6a4 100644
--- a/kong/plugins/httplog/handler.lua
+++ b/kong/plugins/httplog/handler.lua
@@ -1,4 +1,3 @@
-local basic_serializer = require "kong.plugins.log_serializers.basic"
 local BasePlugin = require "kong.plugins.base_plugin"
 local log = require "kong.plugins.httplog.log"

@@ -10,9 +9,7 @@ end

 function HttpLogHandler:log(conf)
   HttpLogHandler.super.log(self)
-
-  local message = basic_serializer.serialize(ngx)
-  log.execute(conf, message)
+  log.execute(conf)
 end

 return HttpLogHandler
diff --git a/kong/plugins/httplog/log.lua b/kong/plugins/httplog/log.lua
index d1031c3ebb13..7d3faf8f2ef6 100644
--- a/kong/plugins/httplog/log.lua
+++ b/kong/plugins/httplog/log.lua
@@ -9,9 +9,9 @@ local _M = {}
 -- @param `message` Message to be logged
 -- @return `payload` http payload
 local function generate_post_payload(method, parsed_url, message)
-  local body = cjson.encode(message)
+  local body = cjson.encode(message);
   local payload = string.format(
-    "%s %s HTTP/1.1\r\nHost: %s\r\nConnection: Keep-Alive\r\nContent-Type: application/json\r\nContent-Length: %s\r\n\r\n%s",
+    "%s %s HTTP/1.1\r\nHost: %s\r\nConnection: Keep-Alive\r\nContent-Type: application/json\r\nContent-Length: %s\r\n\r\n%s",
     method:upper(), parsed_url.path, parsed_url.host, string.len(body), body)
   return payload
 end
@@ -65,11 +65,11 @@ local function log(premature, conf, message)
   end
 end

-function _M.execute(conf, message)
-  local ok, err = ngx.timer.at(0, log, conf, message)
+function _M.execute(conf)
+  local ok, err = ngx.timer.at(0, log, conf, ngx.ctx.log_message)
   if not ok then
     ngx.log(ngx.ERR, "[httplog] failed to create timer: ", err)
   end
 end

-return _M
+return _M
\ No newline at end of file
diff --git a/kong/plugins/log_serializers/alf.lua b/kong/plugins/log_serializers/alf.lua
deleted file mode 100644
index 2fe194d11d3e..000000000000
--- a/kong/plugins/log_serializers/alf.lua
+++ /dev/null
@@ -1,193 +0,0 @@
--- ALF serializer module.
--- ALF is the format supported by Mashape Analytics (http://apianalytics.com)
---
--- This module represents _one_ ALF entry, which can have multiple requests entries.
--- # Usage:
---
--- ## Create the ALF like so:
--- local alf = ALFSerializer:new_alf()
---
--- ## Add entries:
--- local n_entries = alf:add_entry(ngx)
---
--- ## Output the ALF with all its entries as JSON:
--- local json_str = alf:to_json_str(service_token)
---
--- - ALF specifications: https://github.com/Mashape/api-log-format
--- - Nginx lua module documentation: http://wiki.nginx.org/HttpLuaModule
--- - ngx_http_core_module: http://wiki.nginx.org/HttpCoreModule#.24http_HEADER
-
-local json = require "cjson"
-
-local EMPTY_ARRAY_PLACEHOLDER = "__empty_array_placeholder__"
-
-local alf_mt = {}
-alf_mt.__index = alf_mt
-
-function alf_mt:new_alf()
-  local ALF = {
-    version = "1.0.0",
-    serviceToken = "", -- will be filled by to_json_string()
-    har = {
-      log = {
-        version = "1.2",
-        creator = {
-          name = "kong-mashape-analytics-plugin",
-          version = "1.0.0"
-        },
-        entries = {}
-      }
-    }
-  }
-
-  return setmetatable(ALF, self)
-end
-
--- Transform a key/value lua table into an array of elements with `name`, `value`.
--- Since Lua won't recognize {} as an empty array but an empty object, we need to force it
--- to be an array, hence we will do "[__empty_array_placeholder__]".
--- Then once the ALF will be stringified, we will remove the placeholder so the only left element will be "[]".
--- @param `hash` key/value dictionary to serialize.
--- @param `fn` Some function to execute at each key iteration, with the key and value as parameters.
--- @return `array` an array, or nil
-local function dic_to_array(hash, fn)
-  if not fn then fn = function() end end
-  local arr = {}
-  for k, v in pairs(hash) do
-    -- If the key has multiple values, v will be an array of all those values for the same key
-    -- hence we have to add multiple entries to the output array for that same key.
-    if type(v) ~= "table" then
-      v = {v}
-    end
-    for _, val in ipairs(v) do
-      table.insert(arr, { name = k, value = val })
-      fn(k, val)
-    end
-  end
-
-  if #arr > 0 then
-    return arr
-  else
-    return {EMPTY_ARRAY_PLACEHOLDER}
-  end
-end
-
--- Round a number to the third decimal.
--- http://lua-users.org/wiki/SimpleRound
-local function round(num)
-  return math.floor(num * 10^3 + 0.5) / 10^3
-end
-
--- Serialize `ngx` into one ALF entry.
--- For performance reasons, it tries to use the NGINX Lua API instead of
--- ngx_http_core_module when possible.
--- Public for unit testing.
-function alf_mt:serialize_entry(ngx)
-  -- ALF properties computation. Properties prefixed with 'alf_' will belong to the ALF entry.
-  -- other properties are used to compute the ALF properties.
-
-  -- bodies
-  local analytics_data = ngx.ctx.analytics
-
-  local alf_req_body = analytics_data.req_body
-  local alf_res_body = analytics_data.res_body
-
-  -- timers
-  local proxy_started_at, proxy_ended_at = ngx.ctx.proxy_started_at, ngx.ctx.proxy_ended_at
-
-  local alf_started_at = ngx.ctx.started_at
-  local alf_send_time = proxy_started_at - alf_started_at
-  local alf_wait_time = proxy_ended_at ~= nil and proxy_ended_at - proxy_started_at or -1
-  local alf_receive_time = analytics_data.response_received and analytics_data.response_received - proxy_ended_at or -1
-  -- Compute the total time. If some properties were unavailable
-  -- (because the proxying was aborted), then don't add the value.
-  local alf_time = 0
-  for _, timer in ipairs({alf_send_time, alf_wait_time, alf_receive_time}) do
-    if timer > 0 then
-      alf_time = alf_time + timer
-    end
-  end
-
-  -- headers and headers size
-  local req_headers_str, res_headers_str = "", ""
-  local req_headers = ngx.req.get_headers()
-  local res_headers = ngx.resp.get_headers()
-
-  local alf_req_headers_arr = dic_to_array(req_headers, function(k, v) req_headers_str = req_headers_str..k..v end)
-  local alf_res_headers_arr = dic_to_array(res_headers, function(k, v) res_headers_str = res_headers_str..k..v end)
-  local alf_req_headers_size = string.len(req_headers_str)
-  local alf_res_headers_size = string.len(res_headers_str)
-
-  -- mimeType, defaulting to "application/octet-stream"
-  local alf_req_mimeType = req_headers["Content-Type"] and req_headers["Content-Type"] or "application/octet-stream"
-  local alf_res_mimeType = res_headers["Content-Type"] and res_headers["Content-Type"] or "application/octet-stream"
-
-  return {
-    startedDateTime = os.date("!%Y-%m-%dT%TZ", alf_started_at),
-    clientIPAddress = ngx.var.remote_addr,
-    time = round(alf_time),
-    request = {
-      method = ngx.req.get_method(),
-      url = ngx.var.scheme.."://"..ngx.var.host..ngx.var.uri,
-      httpVersion = "HTTP/"..ngx.req.http_version(),
-      queryString = dic_to_array(ngx.req.get_uri_args()),
-      headers = alf_req_headers_arr,
-      headersSize = alf_req_headers_size,
-      cookies = {EMPTY_ARRAY_PLACEHOLDER},
-      bodySize = string.len(alf_req_body),
-      postData = {
-        mimeType = alf_req_mimeType,
-        params = dic_to_array(ngx.req.get_post_args()),
-        text = alf_req_body and alf_req_body or ""
-      }
-    },
-    response = {
-      status = ngx.status,
-      statusText = "", -- can't find a way to retrieve that
-      httpVersion = "", -- can't find a way to retrieve that either
-      headers = alf_res_headers_arr,
-      headersSize = alf_res_headers_size,
-      cookies = {EMPTY_ARRAY_PLACEHOLDER},
-      bodySize = tonumber(ngx.var.body_bytes_sent),
-      redirectURL = "",
-      content = {
-        size = tonumber(ngx.var.body_bytes_sent),
-        mimeType = alf_res_mimeType,
-        text = alf_res_body and alf_res_body or ""
-      }
-    },
-    cache = {},
-    timings = {
-      send = round(alf_send_time),
-      wait = round(alf_wait_time),
-      receive = round(alf_receive_time),
-      blocked = -1,
-      connect = -1,
-      dns = -1,
-      ssl = -1
-    }
-  } -- end of entry
-end
-
-function alf_mt:add_entry(ngx)
-  table.insert(self.har.log.entries, self:serialize_entry(ngx))
-  return table.getn(self.har.log.entries)
-end
-
-function alf_mt:to_json_string(token)
-  if not token then
-    error("Mashape Analytics serviceToken required", 2)
-  end
-
-  -- inject token
-  self.serviceToken = token
-
-  local str = json.encode(self)
-  return str:gsub("\""..EMPTY_ARRAY_PLACEHOLDER.."\"", ""):gsub("\\/", "/")
-end
-
-function alf_mt:flush_entries()
-  self.har.log.entries = {}
-end
-
-return alf_mt
diff --git a/kong/plugins/log_serializers/basic.lua b/kong/plugins/log_serializers/basic.lua
deleted file mode 100644
index 82619262f88e..000000000000
--- a/kong/plugins/log_serializers/basic.lua
+++ /dev/null
@@ -1,25 +0,0 @@
-local _M = {}
-
-function _M.serialize(ngx)
-  return {
-    request = {
-      uri = ngx.var.request_uri,
-      request_uri = ngx.var.scheme.."://"..ngx.var.host..":"..ngx.var.server_port..ngx.var.request_uri,
-      querystring = ngx.req.get_uri_args(), -- parameters, as a table
-      method = ngx.req.get_method(),
-      headers = ngx.req.get_headers(),
-      size = ngx.var.request_length
-    },
-    response = {
-      status = ngx.status,
-      headers = ngx.resp.get_headers(),
-      size = ngx.var.bytes_sent
-    },
-    authenticated_entity = ngx.ctx.authenticated_entity,
-    api = ngx.ctx.api,
-    client_ip = ngx.var.remote_addr,
-    started_at = ngx.req.start_time() * 1000
-  }
-end
-
-return _M
diff --git a/kong/plugins/tcplog/handler.lua b/kong/plugins/tcplog/handler.lua
index 17f1f47bc387..febfedbd5c78 100644
--- a/kong/plugins/tcplog/handler.lua
+++ b/kong/plugins/tcplog/handler.lua
@@ -1,6 +1,5 @@
-local log = require "kong.plugins.tcplog.log"
 local BasePlugin = require "kong.plugins.base_plugin"
-local basic_serializer = require "kong.plugins.log_serializers.basic"
+local log = require "kong.plugins.tcplog.log"

 local TcpLogHandler = BasePlugin:extend()

@@ -10,9 +9,7 @@ end

 function TcpLogHandler:log(conf)
   TcpLogHandler.super.log(self)
-
-  local message = basic_serializer.serialize(ngx)
-  log.execute(conf, message)
+  log.execute(conf)
 end

 return TcpLogHandler
diff --git a/kong/plugins/tcplog/log.lua b/kong/plugins/tcplog/log.lua
index 1aa19b1bf5da..39112e2da009 100644
--- a/kong/plugins/tcplog/log.lua
+++ b/kong/plugins/tcplog/log.lua
@@ -30,8 +30,8 @@ local function log(premature, conf, message)
   end
 end

-function _M.execute(conf, message)
-  local ok, err = ngx.timer.at(0, log, conf, message)
+function _M.execute(conf)
+  local ok, err = ngx.timer.at(0, log, conf, ngx.ctx.log_message)
   if not ok then
     ngx.log(ngx.ERR, "[tcplog] failed to create timer: ", err)
   end
diff --git a/kong/plugins/udplog/handler.lua b/kong/plugins/udplog/handler.lua
index 44f34e4adda2..be1fd5bad75a 100644
--- a/kong/plugins/udplog/handler.lua
+++ b/kong/plugins/udplog/handler.lua
@@ -1,6 +1,7 @@
-local log = require "kong.plugins.udplog.log"
+-- Copyright (C) Mashape, Inc.
+
 local BasePlugin = require "kong.plugins.base_plugin"
-local basic_serializer = require "kong.plugins.log_serializers.basic"
+local log = require "kong.plugins.udplog.log"

 local UdpLogHandler = BasePlugin:extend()

@@ -10,9 +11,7 @@ end

 function UdpLogHandler:log(conf)
   UdpLogHandler.super.log(self)
-
-  local message = basic_serializer.serialize(ngx)
-  log.execute(conf, message)
+  log.execute(conf)
 end

 return UdpLogHandler
diff --git a/kong/plugins/udplog/log.lua b/kong/plugins/udplog/log.lua
index 94503c7939fd..651417473c1b 100644
--- a/kong/plugins/udplog/log.lua
+++ b/kong/plugins/udplog/log.lua
@@ -28,8 +28,8 @@ local function log(premature, conf, message)
   end
 end

-function _M.execute(conf, message)
-  local ok, err = ngx.timer.at(0, log, conf, message)
+function _M.execute(conf)
+  local ok, err = ngx.timer.at(0, log, conf, ngx.ctx.log_message)
   if not ok then
     ngx.log(ngx.ERR, "failed to create timer: ", err)
   end
diff --git a/kong/resolver/header_filter.lua b/kong/resolver/header_filter.lua
index d5c1b40440b3..2dec78edff09 100644
--- a/kong/resolver/header_filter.lua
+++ b/kong/resolver/header_filter.lua
@@ -1,10 +1,11 @@
 local constants = require "kong.constants"
+local timestamp = require "kong.tools.timestamp"

 local _M = {}

 function _M.execute(conf)
   local api_time = ngx.ctx.proxy_ended_at - ngx.ctx.proxy_started_at

-  ngx.header[constants.HEADERS.PROXY_TIME] = ngx.now() * 1000 - ngx.ctx.started_at - api_time
+  ngx.header[constants.HEADERS.PROXY_TIME] = timestamp.get_utc() - ngx.ctx.started_at - api_time
   ngx.header[constants.HEADERS.API_TIME] = api_time
 end
diff --git a/kong/tools/ngx_stub.lua b/kong/tools/ngx_stub.lua
index bce037c8ec7a..89ef48936359 100644
--- a/kong/tools/ngx_stub.lua
+++ b/kong/tools/ngx_stub.lua
@@ -9,7 +9,6 @@ _G.ngx = {
   exit = function() end,
   say = function() end,
   log = function() end,
-  socket = { tcp = {} },
   time = function() return os.time() end,
   re = {
     match = reg.match
diff --git a/kong/tools/timestamp.lua b/kong/tools/timestamp.lua
index 525ca0a401b7..4b84e15f53b3 100644
--- a/kong/tools/timestamp.lua
+++ b/kong/tools/timestamp.lua
@@ -44,4 +44,4 @@ function _M.get_timestamps(now)
   }
 end

-return _M
+return _M
\ No newline at end of file
diff --git a/spec/integration/proxy/resolver_spec.lua b/spec/integration/proxy/resolver_spec.lua
index ddbcc0d5320f..1d509e2be26b 100644
--- a/spec/integration/proxy/resolver_spec.lua
+++ b/spec/integration/proxy/resolver_spec.lua
@@ -25,10 +25,10 @@ describe("Resolver", function()
     spec_helper.prepare_db()
     spec_helper.insert_fixtures {
       api = {
-        { name = "tests host resolver 1", public_dns = "mockbin.com", target_url = "http://mockbin.com" },
-        { name = "tests host resolver 2", public_dns = "mockbin-auth.com", target_url = "http://mockbin.com" },
-        { name = "tests path resolver", public_dns = "mockbin-path.com", target_url = "http://mockbin.com", path = "/status/" },
-        { name = "tests stripped path resolver", public_dns = "mockbin-stripped-path.com", target_url = "http://mockbin.com", path = "/mockbin/", strip_path = true }
+        { name = "tests host resolver 1", public_dns = "mocbkin.com", target_url = "http://mockbin.com" },
+        { name = "tests host resolver 2", public_dns = "mocbkin-auth.com", target_url = "http://mockbin.com" },
+        { name = "tests path resolver", public_dns = "mocbkin-path.com", target_url = "http://mockbin.com", path = "/status/" },
+        { name = "tests stripped path resolver", public_dns = "mocbkin-stripped-path.com", target_url = "http://mockbin.com", path = "/mockbin/", strip_path = true }
       },
       plugin_configuration = {
        { name = "keyauth", value = {key_names = {"apikey"} }, __api = 2 }
@@ -55,7 +55,7 @@ describe("Resolver", function()
   describe("SSL", function()

     it("should work when calling SSL port", function()
-      local response, status = http_client.get(STUB_GET_SSL_URL, nil, { host = "mockbin.com" })
+      local response, status = http_client.get(STUB_GET_SSL_URL, nil, { host = "mocbkin.com" })
       assert.are.equal(200, status)
       assert.truthy(response)
       local parsed_response = cjson.decode(response)
@@ -102,17 +102,17 @@ describe("Resolver", function()
   describe("By Host", function()

     it("should proxy when the API is in Kong", function()
-      local _, status = http_client.get(STUB_GET_URL, nil, { host = "mockbin.com"})
+      local _, status = http_client.get(STUB_GET_URL, nil, { host = "mocbkin.com"})
       assert.are.equal(200, status)
     end)

     it("should proxy when the Host header is not trimmed", function()
-      local _, status = http_client.get(STUB_GET_URL, nil, { host = " mockbin.com "})
+      local _, status = http_client.get(STUB_GET_URL, nil, { host = " mocbkin.com "})
       assert.are.equal(200, status)
     end)

     it("should proxy when the request has no Host header but the X-Host-Override header", function()
-      local _, status = http_client.get(STUB_GET_URL, nil, { ["X-Host-Override"] = "mockbin.com"})
+      local _, status = http_client.get(STUB_GET_URL, nil, { ["X-Host-Override"] = "mocbkin.com"})
       assert.are.equal(200, status)
     end)

@@ -130,8 +130,8 @@ describe("Resolver", function()

     it("should proxy and strip the path if `strip_path` is true", function()
       local response, status = http_client.get(spec_helper.PROXY_URL.."/mockbin/request")
-      assert.are.equal(200, status)
       local body = cjson.decode(response)
+      assert.are.equal(200, status)
       assert.are.equal("http://mockbin.com/request", body.url)
     end)

@@ -146,14 +146,14 @@ describe("Resolver", function()
     end)

     it("should return the correct Server and Via headers when the request was proxied", function()
-      local _, status, headers = http_client.get(STUB_GET_URL, nil, { host = "mockbin.com"})
+      local _, status, headers = http_client.get(STUB_GET_URL, nil, { host = "mocbkin.com"})
       assert.are.equal(200, status)
       assert.are.equal("cloudflare-nginx", headers.server)
       assert.are.equal(constants.NAME.."/"..constants.VERSION, headers.via)
     end)

     it("should return the correct Server and no Via header when the request was NOT proxied", function()
-      local _, status, headers = http_client.get(STUB_GET_URL, nil, { host = "mockbin-auth.com"})
+      local _, status, headers = http_client.get(STUB_GET_URL, nil, { host = "mocbkin-auth.com"})
       assert.are.equal(403, status)
       assert.are.equal(constants.NAME.."/"..constants.VERSION, headers.server)
       assert.falsy(headers.via)
diff --git a/spec/plugins/analytics/alf_serializer_spec.lua b/spec/plugins/analytics/alf_serializer_spec.lua
deleted file mode 100644
index a429bf8d8b29..000000000000
--- a/spec/plugins/analytics/alf_serializer_spec.lua
+++ /dev/null
@@ -1,131 +0,0 @@
-local fixtures = require "spec.plugins.analytics.fixtures.requests"
-local ALFSerializer = require "kong.plugins.log_serializers.alf"
-
--- @see http://lua-users.org/wiki/CopyTable
-local function deepcopy(orig)
-  local copy = {}
-  if type(orig) == "table" then
-    for orig_key, orig_value in next, orig, nil do
-      copy[deepcopy(orig_key)] = deepcopy(orig_value)
-    end
-  else -- number, string, boolean, etc
-    copy = orig
-  end
-  return copy
-end
-
-local function sameEntry(state, arguments)
-  local fixture_entry = deepcopy(arguments[1])
-  local entry = deepcopy(arguments[2])
-
-  local delta = 0.000000000000001
-  -- Compare timings
-  for k, fixture_timer in pairs(fixture_entry.timings) do
-    assert.True(math.abs(entry.timings[k] - fixture_timer) < delta)
-  end
-
-  -- Compare time property
-  assert.True(math.abs(entry.time - fixture_entry.time) < delta)
-
-  -- Compare things that are not computed in the same order depending on the platform
-  assert.are.equal(#(fixture_entry.request.headers), #(entry.request.headers))
-  assert.are.equal(#(fixture_entry.request.queryString), #(entry.request.queryString))
-  assert.are.equal(#(fixture_entry.response.headers), #(entry.response.headers))
-
-  entry.time = nil
-  entry.timings = nil
-  fixture_entry.time = nil
-  fixture_entry.timings = nil
-
-  entry.request.headers = nil
-  entry.response.headers = nil
-  entry.request.queryString = nil
-  fixture_entry.request.headers = nil
-  fixture_entry.response.headers = nil
-  fixture_entry.request.queryString = nil
-
-  assert.are.same(fixture_entry, entry)
-
-  return true
-end
-
-local say = require("say")
-say:set("assertion.sameEntry.positive", "Not the same entries")
-say:set("assertion.sameEntry.negative", "Not the same entries")
-assert:register("assertion", "sameEntry", sameEntry, "assertion.sameEntry.positive", "assertion.sameEntry.negative")
-
-describe("ALF serializer", function()
-
-  local alf
-
-  describe("#new_alf()", function ()
-
-    it("should create a new ALF", function()
-      alf = ALFSerializer:new_alf()
-      assert.same({
-        version = "1.0.0",
-        serviceToken = "",
-        har = {
-          log = {
-            version = "1.2",
-            creator = { name = "kong-mashape-analytics-plugin", version = "1.0.0"
-            },
-            entries = {}
-          }
-        }
-      }, alf)
-    end)
-
-  end)
-
-  describe("#serialize_entry()", function()
-
-    it("should serialize an ngx GET request/response", function()
-      local entry = alf:serialize_entry(fixtures.GET.NGX_STUB)
-      assert.are.sameEntry(fixtures.GET.ENTRY, entry)
-    end)
-
-  end)
-
describe("#add_entry()", function() - - it("should add the entry to the serializer entries property", function() - alf:add_entry(fixtures.GET.NGX_STUB) - assert.equal(1, table.getn(alf.har.log.entries)) - assert.are.sameEntry(fixtures.GET.ENTRY, alf.har.log.entries[1]) - - alf:add_entry(fixtures.GET.NGX_STUB) - assert.equal(2, table.getn(alf.har.log.entries)) - assert.are.sameEntry(fixtures.GET.ENTRY, alf.har.log.entries[2]) - end) - - it("#new_alf() should instanciate a new ALF that has nothing to do with the existing one", function() - local other_alf = ALFSerializer:new_alf() - assert.are_not_same(alf, other_alf) - end) - - end) - - describe("#to_json_string()", function() - - it("should throw an error if no token was given", function() - assert.has_error(function() alf:to_json_string() end, - "Mashape Analytics serviceToken required") - end) - - it("should return a JSON string", function() - local json_str = alf:to_json_string("stub_service_token") - assert.equal("string", type(json_str)) - end) - - end) - - describe("#flush_entries()", function() - - it("should remove any existing entry", function() - alf:flush_entries() - assert.equal(0, table.getn(alf.har.log.entries)) - end) - - end) -end) diff --git a/spec/plugins/analytics/fixtures/requests.lua b/spec/plugins/analytics/fixtures/requests.lua deleted file mode 100644 index b62d90a3192f..000000000000 --- a/spec/plugins/analytics/fixtures/requests.lua +++ /dev/null @@ -1,95 +0,0 @@ -local EMPTY_ARRAY_PLACEHOLDER = "__empty_array_placeholder__" - -return { - ["GET"] = { - ["NGX_STUB"] = { - req = { - get_method = function() return "GET" end, - http_version = function() return 1.1 end, - get_headers = function() return {["Accept"]="/*/",["Host"]="mockbin.com"} end, - get_uri_args = function() return {["hello"]="world",["foo"]="bar"} end, - get_post_args = function() return {["hello"]={"world", "earth"}} end - }, - resp = { - get_headers = function() return {["Connection"]="close",["Content-Type"]="application/json",["Content-Length"]="934"} end - }, - status = 200, - var = { - scheme = "http", - host = "mockbin.com", - uri = "/request", - request_length = 123, - body_bytes_sent = 934, - remote_addr = "127.0.0.1" - }, - ctx = { - started_at = 1432844571.623, - proxy_started_at = 1432844571.719, - proxy_ended_at = 1432844572.11, - analytics = { - req_body = "hello=world&hello=earth", - res_body = "{\"message\":\"response body\"}", - response_received = 1432844572.11 - } - } - }, - ["ENTRY"] = { - cache = {}, - clientIPAddress = "127.0.0.1", - request = { - bodySize = 23, - cookies = {EMPTY_ARRAY_PLACEHOLDER}, - headers = { - { name = "Accept", value = "/*/"}, - { name = "Host", value = "mockbin.com" } - }, - headersSize = 24, - httpVersion = "HTTP/1.1", - method = "GET", - postData = { - mimeType = "application/octet-stream", - params = { - { name = "hello", value = "world" }, - { name = "hello", value = "earth" } - }, - text = "hello=world&hello=earth" - }, - queryString = { - { name = "foo", value = "bar" }, - { name = "hello", value = "world" } - }, - url = "http://mockbin.com/request" - }, - response = { - bodySize = 934, - content = { - mimeType = "application/json", - size = 934, - text = "{\"message\":\"response body\"}" - }, - cookies = {EMPTY_ARRAY_PLACEHOLDER}, - headers = { - { name = "Content-Length", value = "934" }, - { name = "Content-Type", value = "application/json" }, - { name = "Connection", value = "close" } - }, - headersSize = 60, - httpVersion = "", - redirectURL = "", - status = 200, - statusText = "" - }, - 
startedDateTime = "2015-05-28T20:22:51Z", - time = 0.487, - timings = { - blocked = -1, - connect = -1, - dns = -1, - receive = 0, - send = 0.096, - ssl = -1, - wait = 0.391 - } - } - } -} diff --git a/spec/plugins/logging_spec.lua b/spec/plugins/logging_spec.lua index bdae1aa014bf..b829bd2f8356 100644 --- a/spec/plugins/logging_spec.lua +++ b/spec/plugins/logging_spec.lua @@ -1,5 +1,4 @@ local IO = require "kong.tools.io" -local yaml = require "yaml" local uuid = require "uuid" local cjson = require "cjson" local stringy = require "stringy" diff --git a/spec/unit/statics_spec.lua b/spec/unit/statics_spec.lua index aef0b189fe7a..1919a506482a 100644 --- a/spec/unit/statics_spec.lua +++ b/spec/unit/statics_spec.lua @@ -52,7 +52,6 @@ plugins_available: - request_transformer - response_transformer - requestsizelimiting - - analytics ## The Kong working directory ## (Make sure you have read and write permissions) diff --git a/spec/unit/stub_coverage_spec.lua b/spec/unit/stub_coverage_spec.lua index 014454f3a460..8a45c1feeb29 100644 --- a/spec/unit/stub_coverage_spec.lua +++ b/spec/unit/stub_coverage_spec.lua @@ -2,7 +2,6 @@ -- since not all files are currently unit tested and the coverage is erroneous. local IO = require "kong.tools.io" -require "kong.tools.ngx_stub" -- Stub DAO for lapis controllers _G.dao = {}