Mirror of https://github.com/xcat2/confluent.git

Another iteration of trying to make syncfiles IP-adaptive.

It is likely that a client connects from an fe80:: link-local address,
which is explicitly omitted from the ssh principals.

This time, have the client provide all of its currently configured IP
addresses and let the server make the determination.

There remains the possibility that a node misconfigures a NIC and the
server tries to use that address, inducing failure.  One strategy would
be to filter the addresses and only provide those from the 'current'
interface.  Another is to just take the hit, as a node with such a
misconfiguration is likely going to suffer a lot anyway.
Jarrod Johnson 2022-10-05 12:23:47 -04:00
parent 4cb222068f
commit 6eb4bf28e5
10 changed files with 138 additions and 20 deletions
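
As a rough sketch of the determination described in the commit message (the function and variable names below are illustrative only, not the actual confluent API):

def pick_peer_ip(reported_ips, principals):
    # reported_ips: every address the client currently has configured,
    # with link-local (fe80::/169.254) entries already filtered out.
    # principals: names/addresses the server already trusts for this node,
    # e.g. what it would place in the ssh certificate principals.
    for ip in reported_ips:
        if ip in principals:
            return ip      # first reported address the server recognizes
    return None            # no usable match; keep the previous behavior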


@@ -1,4 +1,5 @@
-#!/usr/bin/python
+#!/usr/bin/python3
+import subprocess
 import importlib
 import tempfile
 import json
@@ -211,7 +212,20 @@ def synchronize():
     appendoncedir = tempfile.mkdtemp()
     try:
         ac = apiclient.HTTPSClient()
-        data = json.dumps({'merge': tmpdir, 'appendonce': appendoncedir})
+        myips = []
+        ipaddrs = subprocess.check_output(['ip', '-br', 'a']).split(b'\n')
+        for line in ipaddrs:
+            isa = line.split()
+            if len(isa) < 3 or isa[1] != b'UP':
+                continue
+            for addr in isa[2:]:
+                if addr.startswith(b'fe80::') or addr.startswith(b'169.254'):
+                    continue
+                addr = addr.split(b'/')[0]
+                if not isinstance(addr, str):
+                    addr = addr.decode('utf8')
+                myips.append(addr)
+        data = json.dumps({'merge': tmpdir, 'appendonce': appendoncedir, 'myips': myips})
         status, rsp = ac.grab_url_with_status('/confluent-api/self/remotesyncfiles', data)
         if status == 202:
             lastrsp = ''
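
For context, 'ip -br a' prints one brief line per interface: name, operational state, then the configured addresses with their prefix lengths. Below is a minimal standalone sketch of the filtering the added loop performs, run against invented sample output (interface names and addresses are made up):

sample = b'''lo     UNKNOWN  127.0.0.1/8 ::1/128
eth0   UP       10.1.2.3/16 fe80::5054:ff:fe00:1/64
eth1   DOWN     169.254.10.4/16'''

myips = []
for line in sample.split(b'\n'):
    isa = line.split()
    if len(isa) < 3 or isa[1] != b'UP':
        continue                        # skip interfaces that are not up
    for addr in isa[2:]:
        if addr.startswith(b'fe80::') or addr.startswith(b'169.254'):
            continue                    # drop link-local addresses
        myips.append(addr.split(b'/')[0].decode('utf8'))

print(myips)                            # ['10.1.2.3']

Link-local fe80::/169.254 addresses are dropped for the reason stated in the commit message: they are never placed in the ssh principals, so the server could not match them anyway.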

(The same two hunks are applied verbatim in seven further files changed by this commit.)

@@ -461,10 +461,8 @@ def handle_request(env, start_response):
     elif env['PATH_INFO'].startswith('/self/remotesyncfiles'):
         if 'POST' == operation:
             pals = get_extra_names(nodename, cfg, myip)
-            if clientip not in pals:
-                clientip = None
             result = syncfiles.start_syncfiles(
-                nodename, cfg, json.loads(reqbody), clientip)
+                nodename, cfg, json.loads(reqbody), pals)
             start_response(result, ())
             yield ''
             return


@@ -278,7 +278,15 @@ def mkpathorlink(source, destination, appendexist=False):
 syncrunners = {}


-def start_syncfiles(nodename, cfg, suffixes, peerip=None):
+def start_syncfiles(nodename, cfg, suffixes, principals=[]):
+    peerip = None
+    if 'myips' in suffixes:
+        targips = suffixes['myips']
+        del suffixes['myips']
+        for targip in targips:
+            if targip in principals:
+                peerip = targip
+                break
     deployinfo = cfg.get_node_attributes(
         nodename, ('deployment.*',))
     deployinfo = deployinfo.get(nodename, {})
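
As a concrete illustration of the new selection path (all values below are invented for the example, not taken from the commit):

principals = ['n1', 'n1.cluster.local', '10.30.0.15', '2001:db8::15']
suffixes = {'merge': '/tmp/merge.abc', 'appendonce': '/tmp/ao.abc',
            'myips': ['192.168.122.50', '10.30.0.15']}
# With the change above, start_syncfiles() removes 'myips' from suffixes and
# walks it in order: '192.168.122.50' matches no principal, '10.30.0.15'
# does, so peerip becomes '10.30.0.15'. If nothing matched, peerip would
# remain None, preserving the previous fallback behavior.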