Skip to content
Closed
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
2 changes: 2 additions & 0 deletions CHANGES
Original file line number Diff line number Diff line change
@@ -1,6 +1,8 @@
-*- coding: utf-8 -*-
Changes with Apache Traffic Server 5.0.0

*) [TS-2584] Fix failed assert transforming and caching negative responses

*) [TS-2582] Make traffic_cop debugging easier by logging to stdout.

*) [TS-2579] Remove ipv4 limit for FetchSM and TSFetchUrl/TSFetchPages.
Expand Down
72 changes: 72 additions & 0 deletions plugins/experimental/metalink/test/negativeCaching
Original file line number Diff line number Diff line change
@@ -0,0 +1,72 @@
#!/usr/bin/env python

# TAP plan: exactly one test, checking that negative caching
# doesn't crash the proxy.
print '''1..1 negativeCaching
# Negative caching doesn't crash the proxy'''

from twisted.internet import error, protocol, reactor, tcp
from twisted.web import http

def callback():
    """One-second watchdog: report failure and stop the reactor if the
    test hasn't already finished."""
    print 'not ok 1 - Why didn\'t the test finish yet?'

    reactor.stop()

# Arm the watchdog before any connections are made.
reactor.callLater(1, callback)

class factory(http.HTTPFactory):
    """Origin server: answers every request with an open-ended 404.

    The response deliberately has no Content-Length header and no final
    chunk so the proxy's tunnel stays open while it tries to negatively
    cache the response.
    """

    class protocol(http.HTTPChannel):

        class requestFactory(http.Request):

            # NOTE: ``ctx`` plays the role of the conventional ``self``.
            def requestReceived(ctx, method, target, version):

                # http.Request expects these attributes to be set before
                # a response is written.
                ctx.client = None
                ctx.clientproto = version

                ctx.setResponseCode(404)

                # Don't send a Content-Length header or a final chunk because
                # HttpTunnel::producer_run() sends an HTTP_TUNNEL_EVENT_PRECOMPLETE
                # event at the end of the content which causes
                # HttpSM::tunnel_handler_server() to turn off negative caching.
                #
                # Write some content to reach
                # ink_assert(cache_info->response_get()->valid()) in
                # HttpTransact::set_headers_for_cache_write(). It's called when
                # TransformTerminus sends a TRANSFORM_READ_READY event, but the
                # Metalink transform only reenabled TransformTerminus at the end of the
                # content or if more content was available.
                ctx.write('negativeCaching')

# Listen on an ephemeral port (0); the client below asks the proxy to
# fetch from this origin server.  ``server`` is read later by the
# client's connectionMade to build the request target.
server = tcp.Port(0, factory())
server.startListening()

print '# Listening on {0}:{1}'.format(*server.socket.getsockname())

class factory(protocol.ClientFactory):
    """Client-side factory (rebinds the name ``factory``): aborts the
    whole TAP run if the connection to the proxy can't be made."""

    # NOTE: ``ctx`` plays the role of the conventional ``self``.
    def clientConnectionFailed(ctx, connector, reason):

        # "Bail out!" tells a TAP harness to abort the entire run.
        print 'Bail out!'
        reason.printTraceback()

        reactor.stop()

class protocol(http.HTTPClient):
def connectionLost(ctx, reason):
try:
reactor.stop()

except error.ReactorNotRunning:
pass

else:
print 'not ok 1 - Did the proxy crash? (The client connection closed.)'

connectionMade = lambda ctx: ctx.transport.write('GET {0}:{1} HTTP/1.1\r\n\r\n'.format(*server.socket.getsockname()))

def handleStatus(ctx, version, status, message):
print 'ok 1 - The proxy didn\'t crash (got a reasponse status)'

reactor.stop()

# Connect to the proxy on localhost:8080 (30 s connect timeout) and run
# the event loop; one of the callbacks above stops the reactor.
tcp.Connector('localhost', 8080, factory(), 30, None, reactor).connect()

reactor.run()
10 changes: 5 additions & 5 deletions proxy/http/HttpTransact.cc
Original file line number Diff line number Diff line change
Expand Up @@ -4783,6 +4783,11 @@ HttpTransact::set_headers_for_cache_write(State* s, HTTPInfo* cache_info, HTTPHd

if (!cache_info->valid()) {
cache_info->create();
cache_info->response_set(response);
} else if (!s->negative_caching) {
cache_info->response_set(response);
} else {
ink_assert(cache_info->response_get()->valid());
}

/* Store the requested URI */
Expand All @@ -4805,11 +4810,6 @@ HttpTransact::set_headers_for_cache_write(State* s, HTTPInfo* cache_info, HTTPHd
request->url_set(s->hdr_info.client_request.url_get());
}
cache_info->request_set(request);
if (!s->negative_caching)
cache_info->response_set(response);
else {
ink_assert(cache_info->response_get()->valid());
}

if (s->api_server_request_body_set)
cache_info->request_get()->method_set(HTTP_METHOD_GET, HTTP_LEN_GET);
Expand Down