I am writing a TCP proxy with the Twisted framework and need simple client failover: if the proxy cannot connect to one backend, it should connect to the next one in the list. I have been using
reactor.connectTCP(host, port, factory) for the proxy until now, but it does not raise an error when it cannot connect. How can I detect that the connection failed and try another host, or should I use some other connection method?
You can use a Deferred to do that:
from twisted.internet import defer, reactor
from twisted.internet.protocol import ClientFactory

class MyClientFactory(ClientFactory):
    protocol = ClientProtocol  # your protocol class

    def __init__(self, request):
        self.request = request
        self.deferred = defer.Deferred()

    def handleReply(self, command, reply):
        # Handle the reply, then fire the callback to signal success
        self.deferred.callback(0)

    def clientConnectionFailed(self, connector, reason):
        # Fire the errback so the next host in the chain is tried
        self.deferred.errback(reason)

def send(_, host, port, msg):
    factory = MyClientFactory(msg)
    reactor.connectTCP(host, port, factory)
    return factory.deferred

d = send(None, host1, port1, msg1)   # the first attempt starts the chain
d.addErrback(send, host2, port2, msg2)
# ...
d.addBoth(lambda _: print("finished"))
This triggers the next errback in the chain whenever a connection attempt fails; otherwise execution falls through to the print at the end.
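As an aside, newer Twisted code usually does this with endpoints, whose connect() Deferred fails on its own when the connection cannot be made, so no factory override is needed. A minimal sketch (try_hosts and the (host, port) list are illustrative):

from twisted.internet import reactor
from twisted.internet.endpoints import TCP4ClientEndpoint

def try_hosts(hosts, factory):
    # hosts is a list of (host, port) tuples; connect() returns a
    # Deferred that errbacks on connection failure, so failover is
    # just an errback chain.
    host, port = hosts[0]
    d = TCP4ClientEndpoint(reactor, host, port).connect(factory)
    if len(hosts) > 1:
        d.addErrback(lambda _: try_hosts(hosts[1:], factory))
    return d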
I have two dependent classes: a Protocol that defines the connection behaviour, and a factory that opens the connection. I start a reactor that connects to localhost on a port, but I get the following error:
Connection failed. Reason: [Failure instance: Traceback (failure with no frames): <class 'twisted.internet.error.ConnectionRefusedError'>: Connection was refused by other side: 61: Connection refused.
from twisted.internet import protocol, task, reactor
from twisted.internet.protocol import ClientFactory
from twisted.internet.endpoints import TCP4ClientEndpoint, connectProtocol

class TestClass(protocol.Protocol):
    def __init__(self):
        self._make_connection = self.transport.write("Connect to the transport")
        self.cnt_lost = self.transport.loseConnection()
        self._tst = self.transport.getPeer()

    def test_transport(self):
        self._make_connection
        self._tst
        self.cnt_lost

class EchoClientFactory(ClientFactory):
    def startedConnecting(self, connector):
        print('Started to connect.')

    def buildProtocol(self, addr):
        print('Connected.')
        return TestClass()

    def clientConnectionLost(self, connector, reason):
        print('Lost connection. Reason:', reason)

    def clientConnectionFailed(self, connector, reason):
        print('Connection failed. Reason:', reason)

reactor.connectTCP('127.0.0.1', 8050, EchoClientFactory())
reactor.run()
Not much of an answer: I managed to fix the issue above, but I do not get any data back from the server. For example, I should be getting:
Connect to the transport
Instead, I just get:
Connected
Nor do I get the remote address of the connection.
Here's what I have tried:
Listen on the connection (server side):
class TestClass(protocol.Protocol):
    def recieved_data(self, data):
        self.transport.write(data)

class readClientFactory(ClientFactory):
    def buildProtocol(self, addr):
        print('Connected.')
        return TestClass()

reactor.listenTCP(8070, readClientFactory())
reactor.run()
Connect to the port (client side):
class readClass(protocol.Protocol):
    def connectionmade(self):
        self.transport.write(b"Connect to the transport")
        self.transport.getPeer()

    def test_transport(self):
        self.transport.loseConnection()

class readClientFactory(ClientFactory):
    def buildProtocol(self, addr):
        print('Connected.')
        return readClass()

    def clientConnectionLost(self, connector, reason):
        print('Lost connection. Reason:', reason)
        reactor.stop()

    def clientConnectionFailed(self, connector, reason):
        print('Connection failed. Reason:', reason)
        reactor.stop()

reactor.connectTCP('127.0.0.1', 8070, readClientFactory())
reactor.run()
output:
Connected.
It should be:
Connected.
Connect to the transport
--- Then some stuff about the ip
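Twisted only invokes callbacks with the exact names connectionMade and dataReceived; the snippets above define connectionmade and recieved_data, so Twisted never calls them, which is why nothing is written and nothing is echoed back. A minimal corrected client protocol, assuming the same setup as above:

class readClass(protocol.Protocol):
    def connectionMade(self):
        # Called by Twisted once the TCP connection is established
        self.transport.write(b"Connect to the transport")
        print(self.transport.getPeer())

    def dataReceived(self, data):
        # Called by Twisted whenever the server sends data back
        print(data)
        self.transport.loseConnection()

(and the server side likewise needs dataReceived instead of recieved_data).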
I'd imagine this should be a property of the channel, but the channel doesn't expose anything related to connection configuration.
You can set the request timeout like this:
from clarifai_grpc.channel.clarifai_channel import ClarifaiChannel
from clarifai_grpc.grpc.api import service_pb2_grpc, service_pb2

stub = service_pb2_grpc.V2Stub(ClarifaiChannel.get_grpc_channel())

if __name__ == '__main__':
    YOUR_CLARIFAI_API_KEY = 'addyourclarifaikeyhere'
    auth_metadata = (('authorization', f'Key {YOUR_CLARIFAI_API_KEY}'),)
    resp = stub.ListModels(service_pb2.ListModelsRequest(),
                           metadata=auth_metadata,
                           timeout=0.0001)
    print(resp)
Looking at the list of configuration options for a gRPC channel, there doesn't seem to be a global connection timeout.
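If what you need is a connection timeout rather than a per-request one, a common workaround is to wait for channel readiness yourself. A sketch, assuming you create the channel directly instead of going through ClarifaiChannel:

import grpc

def connect_with_timeout(target, timeout_s=5.0):
    # Block until the channel is actually connected, raising
    # grpc.FutureTimeoutError if it is not ready within timeout_s.
    channel = grpc.insecure_channel(target)
    try:
        grpc.channel_ready_future(channel).result(timeout=timeout_s)
    except grpc.FutureTimeoutError:
        channel.close()
        raise
    return channel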
I have an SSL proxy server and I want to scrape an https site; that is, the connection between Scrapy and the proxy is encrypted, and the proxy then opens a connection to the website.
After some debugging I found the following:
Currently Scrapy handles the situation as follows:
if the site is http, it uses ScrapyProxyAgent, which sends a client hello and then sends a CONNECT request for the website to the proxy;
but if the site is https,
it uses a TunnelingAgent, which does not send a client hello to the proxy, and hence the connection is terminated.
What I need is to tell Scrapy to first establish a connection via ScrapyProxyAgent and then use a TunnelingAgent, but I'm not sure how to do that.
I tried to create an https DOWNLOAD_HANDLERS entry, but I'm not that expert:
# Imports as used by Scrapy's own http11 handler (Scrapy 1.x paths)
from time import time
from urllib.parse import urldefrag
from twisted.internet import reactor
from twisted.web.http_headers import Headers as TxHeaders
from scrapy.core.downloader.handlers.http11 import (
    HTTP11DownloadHandler, ScrapyProxyAgent, TunnelingAgent, _RequestBodyProducer)
from scrapy.core.downloader.webclient import _parse
from scrapy.utils.python import to_bytes, to_unicode

class MyHTTPDownloader(HTTP11DownloadHandler):
    def download_request(self, request, spider):
        """Return a deferred for the HTTP download"""
        timeout = request.meta.get('download_timeout') or self._connectTimeout
        bindaddress = request.meta.get('bindaddress')
        proxy = request.meta.get('proxy')
        agent = ScrapyProxyAgent(reactor, proxyURI=to_bytes(proxy, encoding='ascii'),
                                 connectTimeout=timeout, bindAddress=bindaddress,
                                 pool=self._pool)
        _, _, proxyHost, proxyPort, proxyParams = _parse(proxy)
        proxyHost = to_unicode(proxyHost)
        url = urldefrag(request.url)[0]
        method = to_bytes(request.method)
        headers = TxHeaders(request.headers)
        omitConnectTunnel = b'noconnect' in proxyParams
        proxyConf = (proxyHost, proxyPort,
                     request.headers.get(b'Proxy-Authorization', None))
        if request.body:
            bodyproducer = _RequestBodyProducer(request.body)
        elif method == b'POST':
            bodyproducer = _RequestBodyProducer(b'')
        else:
            bodyproducer = None
        start_time = time()
        # Created but never wired in -- this is the part I'm stuck on:
        tunnelingAgent = TunnelingAgent(reactor, proxyConf,
                                        contextFactory=self._contextFactory,
                                        connectTimeout=timeout,
                                        bindAddress=bindaddress, pool=self._pool)
        return agent.request(method, to_bytes(url, encoding='ascii'),
                             headers, bodyproducer)
I need to establish a tunnel after the proxy agent has connected. Is that even possible? Thanks in advance.
I want to get the certificate hash, but I have no idea how to get the server's peer certificate, either from the request or from the response. The server I send the request to sets the Connection: close header, so retrieving the original SSL socket from the response doesn't work.
Currently no way, sorry.
You can easily check a cert hash, though: https://docs.aiohttp.org/en/stable/client_advanced.html#ssl-control-for-tcp-sockets
The following example uses a SHA-256 fingerprint check:
fingerprint = b'...'  # the 32-byte (256/8) SHA-256 digest of the DER-encoded certificate
r = await session.get('https://example.com',
                      ssl=aiohttp.Fingerprint(fingerprint))
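In case it helps, that digest can be computed up front with the standard library. A sketch, assuming example.com:443 speaks TLS directly:

import hashlib
import ssl

pem = ssl.get_server_certificate(('example.com', 443))
der = ssl.PEM_cert_to_DER_cert(pem)         # Fingerprint wants the DER form
fingerprint = hashlib.sha256(der).digest()  # 32 bytes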
I've come up with this solution/hack:

import aiohttp

class WrappedResponseClass(aiohttp.ClientResponse):
    def __init__(self, *args, **kwargs):
        super(WrappedResponseClass, self).__init__(*args, **kwargs)
        self._peer_cert = None

    async def start(self, connection, read_until_eof=False):
        try:
            # Grab the DER-encoded peer certificate while the connection is alive
            self._peer_cert = connection.transport._ssl_protocol._extra['ssl_object'].getpeercert(True)
        except Exception:
            pass
        return await super(WrappedResponseClass, self).start(connection, read_until_eof)

    @property
    def peer_cert(self):
        return self._peer_cert

session = aiohttp.ClientSession(response_class=WrappedResponseClass)  # plus your other args
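Usage would then look something like this (a sketch; the private attributes used in start() above are aiohttp-version-dependent):

async with aiohttp.ClientSession(response_class=WrappedResponseClass) as session:
    resp = await session.get('https://example.com')
    print(resp.peer_cert)  # DER bytes captured during start(), or None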
The following works for me with aiohttp 3.8.3:
async with aiohttp.ClientSession() as session:
    r = await session.get('https://bbc.com')
    cert = r.connection.transport.get_extra_info('peercert')
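Note that 'peercert' returns the already-decoded dict; for hashing you need the DER bytes from the underlying SSL object. A sketch, assuming the connection has not been released back to the pool yet:

import hashlib

async with aiohttp.ClientSession() as session:
    r = await session.get('https://bbc.com')
    ssl_obj = r.connection.transport.get_extra_info('ssl_object')
    der = ssl_obj.getpeercert(binary_form=True)
    print(hashlib.sha256(der).hexdigest())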
This is the echo server code that the echo client communicates with:
from twisted.internet import protocol, reactor

class Echo(protocol.Protocol):
    def dataReceived(self, data):
        # Echo whatever the client sends straight back
        self.transport.write(data)

class EchoFactory(protocol.Factory):
    def buildProtocol(self, addr):
        return Echo()

reactor.listenTCP(8000, EchoFactory())
reactor.run()
This is the echo client:
from twisted.internet import reactor, protocol

class EchoClient(protocol.Protocol):
    def connectionMade(self):
        self.transport.write(b"Hello, world!")

    def dataReceived(self, data):
        print("Server said:", data)
        self.transport.loseConnection()

class EchoFactory(protocol.ClientFactory):
    def buildProtocol(self, addr):
        return EchoClient()

    def clientConnectionFailed(self, connector, reason):
        print("Connection failed.")
        reactor.stop()

    def clientConnectionLost(self, connector, reason):
        print("Connection lost.")
        reactor.stop()

reactor.connectTCP("localhost", 8000, EchoFactory())
reactor.run()
The echo server and echo client above communicate with each other, but I want server-to-server communication: a second echo server should come up and communicate with the first one.
You need to build a proxy client and attach it to one of the servers, then communicate with the other server via that proxy client.
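A minimal sketch of that idea (RelayServer, RelayClient, and port 8001 are illustrative): the second server embeds a client protocol and relays each chunk it receives to the first echo server.

from twisted.internet import protocol, reactor
from twisted.internet.endpoints import TCP4ClientEndpoint, connectProtocol

FIRST_SERVER_PORT = 8000   # the echo server above
SECOND_SERVER_PORT = 8001  # port for the second server

class RelayClient(protocol.Protocol):
    # Client protocol the second server uses to talk to the first server
    def __init__(self, data):
        self.data = data

    def connectionMade(self):
        self.transport.write(self.data)

    def dataReceived(self, data):
        print("First server said:", data)
        self.transport.loseConnection()

class RelayServer(protocol.Protocol):
    # Second server: forwards whatever it receives to the first server
    def dataReceived(self, data):
        endpoint = TCP4ClientEndpoint(reactor, "localhost", FIRST_SERVER_PORT)
        connectProtocol(endpoint, RelayClient(data))

class RelayFactory(protocol.Factory):
    def buildProtocol(self, addr):
        return RelayServer()

reactor.listenTCP(SECOND_SERVER_PORT, RelayFactory())
reactor.run()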