ceph-csi/vendor/k8s.io/kubernetes/test/images/echoserver/nginx.conf

load_module modules/ndk_http_module.so;
load_module modules/ngx_http_lua_module.so;
load_module modules/ngx_http_lua_upstream_module.so;
events {
    worker_connections 1024;
}
env HOSTNAME;
env NODE_NAME;
env POD_NAME;
env POD_NAMESPACE;
env POD_IP;
http {
    default_type 'text/plain';
    # maximum allowed size of the client request body. By default this is 1m.
    # For requests with bigger bodies, nginx returns error code 413 (Request Entity Too Large).
    # http://nginx.org/en/docs/http/ngx_http_core_module.html#client_max_body_size
    client_max_body_size 10m;
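    # A quick way to see the limit in action (sketch; assumes the server below is reachable
    # on localhost:8080 and that big-body.bin is a local file larger than 10m):
    #   curl -s -o /dev/null -w '%{http_code}\n' --data-binary @big-body.bin http://localhost:8080/
    # should print 413, while smaller bodies are echoed back normally.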
    init_by_lua_block {
        local template = require("template")
        -- template syntax documented here:
        -- https://github.com/bungle/lua-resty-template/blob/master/README.md
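        -- Quick reference for the syntax used below (per the README linked above):
        -- {{expr}} prints a Lua expression HTML-escaped, {*expr*} prints it unescaped,
        -- and {% lua %} runs plain Lua (conditionals, loops) without printing anything.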
        tmpl = template.compile([[

Hostname: {{os.getenv("HOSTNAME") or "N/A"}}

Pod Information:
{% if os.getenv("POD_NAME") then %}
    node name: {{os.getenv("NODE_NAME") or "N/A"}}
    pod name: {{os.getenv("POD_NAME") or "N/A"}}
    pod namespace: {{os.getenv("POD_NAMESPACE") or "N/A"}}
    pod IP: {{os.getenv("POD_IP") or "N/A"}}
{% else %}
    -no pod information available-
{% end %}

Server values:
    server_version=nginx: {{ngx.var.nginx_version}} - lua: {{ngx.config.ngx_lua_version}}

Request Information:
    client_address={{ngx.var.remote_addr}}
    method={{ngx.req.get_method()}}
    real path={{ngx.var.request_uri}}
    query={{ngx.var.query_string or ""}}
    request_version={{ngx.req.http_version()}}
    request_scheme={{ngx.var.scheme}}
    request_uri={{ngx.var.scheme.."://"..ngx.var.host..":"..ngx.var.server_port..ngx.var.request_uri}}

Request Headers:
{% for i, key in ipairs(keys) do %}
{%   local val = headers[key] %}
{%   if type(val) == "table" then %}
{%     for i = 1,#val do %}
    {{key}}={{val[i]}}
{%     end %}
{%   else %}
    {{key}}={{val}}
{%   end %}
{% end %}

Request Body:
    {{ngx.var.request_body or " -no body in request-"}}
]])
    }
    server {
        # please check the benefits of reuseport: https://www.nginx.com/blog/socket-sharding-nginx-release-1-9-1
        # basically it instructs nginx to create an individual listening socket for each worker process
        # (using the SO_REUSEPORT socket option), allowing the kernel to distribute incoming connections
        # between worker processes.
        listen 8080 default_server reuseport;
        listen 8443 default_server ssl http2 reuseport;
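        # One way to observe the effect (sketch, assuming iproute2's `ss` is available in the
        # container): `ss -lnt '( sport = :8080 )'` should list one listening socket per nginx
        # worker process rather than a single shared socket.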
        ssl_certificate /certs/certificate.crt;
        ssl_certificate_key /certs/privateKey.key;
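        # The certificate/key pair above is read from /certs. If a throwaway pair is needed for
        # local testing, it could be generated along these lines (sketch; paths and CN are illustrative):
        #   openssl req -x509 -nodes -newkey rsa:2048 -days 365 \
        #     -keyout /certs/privateKey.key -out /certs/certificate.crt -subj '/CN=echoserver'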
        # Replace '_' with your hostname.
        server_name _;
        # set a long keepalive_timeout because some load balancer proxies expect the connection
        # to remain open for at least ten minutes.
        keepalive_timeout 620s;
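        # (620s leaves a ~20s margin over the ten minutes = 600s mentioned above.)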
        location / {
            lua_need_request_body on;
            content_by_lua_block {
                ngx.header["Server"] = "echoserver"

                -- collect and sort the request header names so the echoed output
                -- has a deterministic order
                local headers = ngx.req.get_headers()
                local keys = {}
                for key, val in pairs(headers) do
                    table.insert(keys, key)
                end
                table.sort(keys)

                -- render the template compiled in init_by_lua_block above
                ngx.say(tmpl({os=os, ngx=ngx, keys=keys, headers=headers}))
            }
        }
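        # Example exchange (sketch; assumes the pod has been port-forwarded to localhost:8080):
        #   curl -H 'X-Demo: 1' -d 'hello' 'http://localhost:8080/some/path?debug=true'
        # returns the hostname, pod metadata, request line, the sorted request headers and the
        # request body, rendered through the template compiled in init_by_lua_block.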
    }
}