Welcome! Log In Create A New Profile

Advanced

Re: epoll_wait() reported that client prematurely closed connection

September 18, 2014 01:49AM
Проблема в том, что на проксируемом сервере происходят ошибки типа "An exception occured writing the response entity. Broken pipe", их нужно устранить.
По всей видимости, они возникают из-за того, что nginx рвёт соединение с проксируемым сервером; мы не уверены, что это не будет приводить к утечкам памяти вследствие
такого типа завершения соединения.

CentOS release 6.5
Linux version 2.6.32-431.el6.x86_64
nginx/1.5.8

Содержимое файла конфигурации:

user nginx;

worker_processes 8;
timer_resolution 100ms;
worker_rlimit_nofile 50000;
worker_priority -5;

error_log /var/log/nginx/error.log info;
pid /var/run/nginx.pid;

events {
worker_connections 25000;
use epoll;
}

http {

include mime.types;
default_type application/x-javascript;

log_format main '$remote_addr - $remote_user [$time_local] $request '
'"$status" $body_bytes_sent "$http_referer" '
'"$http_user_agent" "$http_x_forwarded_for"';

chunked_transfer_encoding off;

gzip on;
gzip_static on;
gzip_min_length 640;
gzip_buffers 64 8k;
gzip_comp_level 4;
gzip_http_version 1.1;
gzip_proxied any;
gzip_types text/plain application/xml application/x-javascript text/css;
gzip_disable "MSIE [1-6]\.(?!.*SV1)";
gzip_vary on;

output_buffers 32 512k;
sendfile_max_chunk 128k;
postpone_output 1460;
server_names_hash_bucket_size 64;

tcp_nopush on;
tcp_nodelay on;

client_max_body_size 1m;
client_body_buffer_size 128k;
client_header_buffer_size 1k;
large_client_header_buffers 4 4k;

keepalive_timeout 45 45;
client_header_timeout 45;
client_body_timeout 45;
send_timeout 45;
reset_timedout_connection on;

memcached_connect_timeout 60s;
memcached_read_timeout 60s;
memcached_send_timeout 60s;

charset utf-8;
source_charset utf-8;
ignore_invalid_headers on;
keepalive_requests 100;
recursive_error_pages off;
server_tokens off;
server_name_in_redirect off;
sendfile on;

open_file_cache max=1000 inactive=20s;
open_file_cache_valid 30s;
open_file_cache_min_uses 2;
open_file_cache_errors on;

#######################################################################
# PUSH_STREAM_MODULE GLOBAL SETTINGS (COMET)
#######################################################################

#The size of the memory chunk this module will use to store published messages,
#channels and other shared structures. When this memory is full, any new request
#to publish a message or subscribe to a channel will receive a 500 Internal Server Error response.
push_stream_shared_memory_size 100M;

#Maximum permissible channel id length (number of characters).
#Longer ids will receive a 400 Bad Request response.
push_stream_max_channel_id_length 50;

#The length of time a subscriber will stay connected before it is considered expired and disconnected.
#If you do not want subscribers to be automatically disconnected, simply do not set this directive.
#However, this expiration is important to help nginx recycle the memory consumed to send messages to subscribers,
#which is allocated from the request pool.
push_stream_subscriber_connection_ttl 5m;
push_stream_longpolling_connection_ttl 5m;

push_stream_wildcard_channel_prefix "broadcast_";
proxy_cache_path /var/cache/nginx/ftl levels=1:2 keys_zone=ftl-cache:20m max_size=100m inactive=120m;

upstream memcached_cluster {
server 127.0.0.1:11211;
hash $uri/3.8;
hash_again 1000;
keepalive 512;
}

server {
listen *:80;
server_name_in_redirect off;
server_name test.example.com;

proxy_connect_timeout 60s;
proxy_send_timeout 60s;
proxy_read_timeout 60s;

proxy_buffering on;
proxy_buffer_size 64k;
proxy_buffers 4 64k;
proxy_busy_buffers_size 128k;
proxy_temp_file_write_size 10m;
proxy_headers_hash_bucket_size 256;

proxy_set_header Host $host:$server_port;
proxy_set_header X-Real-IP $remote_addr;
proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
access_log /var/log/nginx/nginx.log main;
log_not_found off;
root /var/spool/nginx/;

location /portal-facade-ng/v1/btv/imageMap/ {
if ($request_method != GET) {
proxy_pass http://127.0.0.1:8080;
break;
}

add_header Cache-Control no-cache;
add_header Content-Type image/png;
default_type image/png;
set $memcached_key "$uri/3.8";
memcached_pass memcached_cluster;
}


location /portal-facade-ng/v1/btv/epg/current/ {
if ($request_method != GET) {
proxy_pass http://127.0.0.1:8080;
break;
}
set $memcached_key "$uri/3.8";
memcached_pass memcached_cluster;
}

location /portal-facade-ng/v1/btv/epgGrid/bar/image/ {
if ($request_method != GET) {
proxy_pass http://127.0.0.1:8080;
break;
}

add_header Cache-Control no-cache;
add_header Content-Type image/png;
default_type image/png;
set $memcached_key "$uri/3.8";
memcached_pass memcached_cluster;
}
}
}
Subject Author Posted

epoll_wait() reported that client prematurely closed connection

ole-lukoje September 17, 2014 10:38AM

Re: epoll_wait() reported that client prematurely closed connection

Валентин Бартенев September 17, 2014 11:14AM

Re: epoll_wait() reported that client prematurely closed connection

ole-lukoje September 18, 2014 01:49AM

Re: epoll_wait() reported that client prematurely closed connection

Vadim Lazovskiy September 18, 2014 02:04AM

Re: epoll_wait() reported that client prematurely closed connection

ole-lukoje September 18, 2014 03:03AM

Re: epoll_wait() reported that client prematurely closed connection

Валентин Бартенев September 18, 2014 04:18AM

Re: epoll_wait() reported that client prematurely closed connection

ole-lukoje September 19, 2014 09:52AM

Re: epoll_wait() reported that client prematurely closed connection

Валентин Бартенев September 19, 2014 10:22AM

Re: epoll_wait() reported that client prematurely closed connection

ole-lukoje October 03, 2014 09:16AM



Sorry, only registered users may post in this forum.

Click here to login

Online Users

Guests: 323
Record Number of Users: 8 on April 13, 2023
Record Number of Guests: 421 on December 02, 2018
Powered by nginx      Powered by FreeBSD      PHP Powered      Powered by MariaDB      ipv6 ready