diff --git a/CHANGES b/CHANGES
index 16463e3cb25..cec55b67025 100644
--- a/CHANGES
+++ b/CHANGES
@@ -1,6 +1,8 @@
                                                           -*- coding: utf-8 -*-
 Changes with Apache Traffic Server 5.0.0
 
+  *) [TS-2584] Fix failed assert transforming and caching negative responses
+
   *) [TS-2582] Make traffic_cop debugging easier by logging to stdout.
 
   *) [TS-2579] Remove ipv4 limit for FetchSM and TSFetchUrl/TSFetchPages.
diff --git a/plugins/experimental/metalink/test/negativeCaching b/plugins/experimental/metalink/test/negativeCaching
new file mode 100755
index 00000000000..59466572b10
--- /dev/null
+++ b/plugins/experimental/metalink/test/negativeCaching
@@ -0,0 +1,72 @@
+#!/usr/bin/env python
+
+print '''1..1 negativeCaching
+# Negative caching doesn't crash the proxy'''
+
+from twisted.internet import error, protocol, reactor, tcp
+from twisted.web import http
+
+def callback():
+  print 'not ok 1 - Why didn\'t the test finish yet?'
+
+  reactor.stop()
+
+reactor.callLater(1, callback)
+
+class factory(http.HTTPFactory):
+  class protocol(http.HTTPChannel):
+    class requestFactory(http.Request):
+      def requestReceived(ctx, method, target, version):
+
+        ctx.client = None
+        ctx.clientproto = version
+
+        ctx.setResponseCode(404)
+
+        # Don't send a Content-Length header or a final chunk because
+        # HttpTunnel::producer_run() sends an HTTP_TUNNEL_EVENT_PRECOMPLETE
+        # event at the end of the content which causes
+        # HttpSM::tunnel_handler_server() to turn off negative caching.
+        #
+        # Write some content to reach
+        # ink_assert(cache_info->response_get()->valid()) in
+        # HttpTransact::set_headers_for_cache_write(). It's called when
+        # TransformTerminus sends a TRANSFORM_READ_READY event, but the
+        # Metalink transform only reenabled TransformTerminus at the end of the
+        # content or if more content was available.
+        ctx.write('negativeCaching')
+
+server = tcp.Port(0, factory())
+server.startListening()
+
+print '# Listening on {0}:{1}'.format(*server.socket.getsockname())
+
+class factory(protocol.ClientFactory):
+  def clientConnectionFailed(ctx, connector, reason):
+
+    print 'Bail out!'
+    reason.printTraceback()
+
+    reactor.stop()
+
+  class protocol(http.HTTPClient):
+    def connectionLost(ctx, reason):
+      try:
+        reactor.stop()
+
+      except error.ReactorNotRunning:
+        pass
+
+      else:
+        print 'not ok 1 - Did the proxy crash?  (The client connection closed.)'
+
+    connectionMade = lambda ctx: ctx.transport.write('GET {0}:{1} HTTP/1.1\r\n\r\n'.format(*server.socket.getsockname()))
+
+    def handleStatus(ctx, version, status, message):
+      print 'ok 1 - The proxy didn\'t crash (got a response status)'
+
+      reactor.stop()
+
+tcp.Connector('localhost', 8080, factory(), 30, None, reactor).connect()
+
+reactor.run()
diff --git a/proxy/http/HttpTransact.cc b/proxy/http/HttpTransact.cc
index 9692d998a2d..fb16688a2e8 100644
--- a/proxy/http/HttpTransact.cc
+++ b/proxy/http/HttpTransact.cc
@@ -4783,6 +4783,11 @@ HttpTransact::set_headers_for_cache_write(State* s, HTTPInfo* cache_info, HTTPHd
 
   if (!cache_info->valid()) {
     cache_info->create();
+    cache_info->response_set(response);
+  } else if (!s->negative_caching) {
+    cache_info->response_set(response);
+  } else {
+    ink_assert(cache_info->response_get()->valid());
   }
 
   /* Store the requested URI */
@@ -4805,11 +4810,6 @@ HttpTransact::set_headers_for_cache_write(State* s, HTTPInfo* cache_info, HTTPHd
     request->url_set(s->hdr_info.client_request.url_get());
   }
   cache_info->request_set(request);
-  if (!s->negative_caching)
-    cache_info->response_set(response);
-  else {
-    ink_assert(cache_info->response_get()->valid());
-  }
 
   if (s->api_server_request_body_set)
     cache_info->request_get()->method_set(HTTP_METHOD_GET, HTTP_LEN_GET);
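
For reference, the origin behaviour that the Twisted test arranges can also be sketched with nothing but the Python standard library. This is an illustrative stand-alone snippet, not part of the patch: the port number (8081) and the exact chunk framing are assumptions, chosen to match what the comment in the test describes (a 404 whose body arrives without a Content-Length header and without the final chunk, over a connection that stays open).

    #!/usr/bin/env python
    # Illustrative sketch only (not part of the patch): a bare-bones origin that
    # mimics the response the Twisted test builds.  The port (8081) is arbitrary.

    import socket
    import time

    listener = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
    listener.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
    listener.bind(('localhost', 8081))
    listener.listen(1)

    connection, address = listener.accept()

    # Read and discard the proxied request
    connection.recv(4096)

    # A 404 with a chunked body: one chunk of content ("negativeCaching" is 0xf
    # bytes) but no terminating zero-length chunk and no Content-Length header
    connection.sendall(b'HTTP/1.1 404 Not Found\r\n'
                       b'Transfer-Encoding: chunked\r\n'
                       b'\r\n'
                       b'f\r\n'
                       b'negativeCaching\r\n')

    # Keep the connection open so the response never completes
    time.sleep(30)
    connection.close()
    listener.close()

With the HttpTransact.cc change above, the response is stored on the cache object as soon as it is created, so under negative caching the ink_assert() in the else branch only runs against a cache object that already carries a valid response.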