55"""
66
77import os
8+ import re
89import socket
910from asyncio import Lock
1011from copy import copy
@@ -287,7 +288,7 @@ def get_client_uri(self, protocol, host, port, proxied_path):

         return client_uri

-    def _build_proxy_request(self, host, port, proxied_path, body):
+    def _build_proxy_request(self, host, port, proxied_path, body, **extra_opts):
         headers = self.proxy_request_headers()

         client_uri = self.get_client_uri("http", host, port, proxied_path)
@@ -307,6 +308,7 @@ def _build_proxy_request(self, host, port, proxied_path, body):
             decompress_response=False,
             headers=headers,
             **self.proxy_request_options(),
+            **extra_opts,
         )
         return req

@@ -365,7 +367,6 @@ async def proxy(self, host, port, proxied_path):
                 body = b""
             else:
                 body = None
-
         if self.unix_socket is not None:
             # Port points to a Unix domain socket
             self.log.debug("Making client for Unix socket %r", self.unix_socket)
@@ -374,8 +375,97 @@ async def proxy(self, host, port, proxied_path):
                 force_instance=True, resolver=UnixResolver(self.unix_socket)
             )
         else:
-            client = httpclient.AsyncHTTPClient()
+            client = httpclient.AsyncHTTPClient(force_instance=True)
+        # Check whether this is a streaming (server-sent events) request
+        accept_header = self.request.headers.get("Accept")
+        if accept_header == "text/event-stream":
+            return await self._proxy_progressive(host, port, proxied_path, body, client)
+        else:
+            return await self._proxy_buffered(host, port, proxied_path, body, client)
+
+    async def _proxy_progressive(self, host, port, proxied_path, body, client):
+        # Proxy in progressive flush mode, flushing whenever chunks are received. Potentially slower overall, but gets results to the client quicker, e.g. for Voila
+        # Set up handlers so we can progressively flush the result
+
+        headers_raw = []
+
+        def dump_headers(headers_raw):
+            for line in headers_raw:
+                r = re.match("^([a-zA-Z0-9\\-_]+)\\s*\\:\\s*([^\r\n]+)[\r\n]*$", line)
+                if r:
+                    k, v = r.groups([1, 2])
+                    if k not in (
+                        "Content-Length",
+                        "Transfer-Encoding",
+                        "Content-Encoding",
+                        "Connection",
+                    ):
+                        # some headers appear multiple times, e.g. 'Set-Cookie'
+                        self.set_header(k, v)
+                else:
+                    r = re.match(r"^HTTP[^\s]* ([0-9]+)", line)
+                    if r:
+                        status_code = r.group(1)
+                        self.set_status(int(status_code))
+            headers_raw.clear()
+
+        # clear tornado's default headers
+        self._headers = httputil.HTTPHeaders()
+
+        def header_callback(line):
+            headers_raw.append(line)
+
+        def streaming_callback(chunk):
+            # record activity at start and end of requests
+            self._record_activity()
+            # Do this here, not in header_callback, so we can be sure headers are out of the way first
+            dump_headers(
+                headers_raw
+            )  # list will be empty if this was already called before
+            self.write(chunk)
+            self.flush()
+
+        # Now make the request
+
+        req = self._build_proxy_request(
+            host,
+            port,
+            proxied_path,
+            body,
+            streaming_callback=streaming_callback,
+            header_callback=header_callback,
+        )
+
+        # effectively no timeout for streaming requests
+        req.request_timeout = 7200
+        req.connect_timeout = 600
+
+        try:
+            response = await client.fetch(req, raise_error=False)
+        except httpclient.HTTPError as err:
+            if err.code == 599:
+                self._record_activity()
+                self.set_status(599)
+                self.write(str(err))
+                return
+            else:
+                raise
+
+        # For all non-HTTP errors...
+        if response.error and type(response.error) is not httpclient.HTTPError:
+            self.set_status(500)
+            self.write(str(response.error))
+        else:
+            self.set_status(
+                response.code, response.reason
+            )  # Should already have been set
+
+            dump_headers(headers_raw)  # Should already have been emptied
+
+            if response.body:  # Likewise, should already be chunked out and flushed
+                self.write(response.body)

+    async def _proxy_buffered(self, host, port, proxied_path, body, client):
         req = self._build_proxy_request(host, port, proxied_path, body)

         self.log.debug(f"Proxying request to {req.url}")
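The progressive path above hinges on two Tornado client options that `_build_proxy_request` now forwards via `**extra_opts`: `streaming_callback`, which receives each body chunk as it arrives, and `header_callback`, which receives each raw header line. The standalone sketch below is not part of this diff; it only illustrates that client mechanism in isolation, and the URL and port are placeholders for any backend that emits `text/event-stream`.

```python
# Minimal sketch of Tornado's streaming fetch, assuming some SSE endpoint is
# reachable at the placeholder URL used in __main__ below.
from tornado import httpclient, ioloop


async def stream(url):
    def header_callback(line):
        # Called once per raw header line, including the status line.
        print("header:", line.rstrip())

    def streaming_callback(chunk):
        # Called for every body chunk as soon as it arrives.
        print("chunk:", chunk)

    req = httpclient.HTTPRequest(
        url,
        headers={"Accept": "text/event-stream"},
        streaming_callback=streaming_callback,
        header_callback=header_callback,
        request_timeout=7200,  # mirror the generous timeout used in the diff
    )
    client = httpclient.AsyncHTTPClient(force_instance=True)
    await client.fetch(req, raise_error=False)


if __name__ == "__main__":
    # Placeholder endpoint; substitute any server that emits text/event-stream.
    ioloop.IOLoop.current().run_sync(lambda: stream("http://localhost:8888/events"))
```

Through the proxy itself, the same behaviour is triggered simply by sending `Accept: text/event-stream` on the incoming request; any other request continues through `_proxy_buffered` as before.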