Commit e1799526 authored by yvonneanne

initial commit

parent 5cf0816f
# fast-failover
This repository contains the source code for the experiments of the following papers (INFOCOM 2019, DSN 2019, SRDS 2019) and future work:

* [SRDS 2019: Improved Fast Rerouting Using Postprocessing](https://www.univie.ac.at/ct/stefan/srds19failover.pdf)
* [DSN 2019: Bonsai: Efficient Fast Failover Routing Using Small Arborescences](https://www.univie.ac.at/ct/stefan/dsn19.pdf)
* [INFOCOM 2019: CASA: Congestion and Stretch Aware Static Fast Rerouting](https://www.univie.ac.at/ct/stefan/infocom2019e.pdf)

by Klaus-Tycho Foerster, Andrzej Kamisinski, Yvonne-Anne Pignolet, Stefan Schmid, and Gilles Tredan.
We are indebted to Ilya Nikolaevskiy (Aalto University, Finland): our implementation is based on his source code for [this paper](http://www.dia.uniroma3.it/~compunet/www/docs/chiesa/Resiliency-ToN.pdf).
If you use this code, please cite the corresponding paper(s).
## Bibtex
@INPROCEEDINGS{srds19foerster,
author = {Klaus-Tycho Foerster and Andrzej Kamisinski and Yvonne-Anne Pignolet and Stefan Schmid and Gilles Tredan},
title = {Improved Fast Rerouting Using Postprocessing},
booktitle = {Proc. 38th International Symposium on Reliable Distributed Systems (SRDS)},
year = {2019},
}
@INPROCEEDINGS{dsn19foerster,
author = {Klaus-Tycho Foerster and Andrzej Kamisinski and
Yvonne-Anne Pignolet and Stefan Schmid and Gilles Tredan},
title = {Bonsai: Efficient Fast Failover Routing Using Small Arborescences},
booktitle = {Proc. 49th IEEE/IFIP International Conference on Dependable Systems and Networks (DSN)},
year = {2019},
}
@INPROCEEDINGS{infocom19foerster,
author = {Klaus-Tycho Foerster and Yvonne-Anne Pignolet and Stefan Schmid and Gilles Tredan},
title = {CASA: Congestion and Stretch Aware Static Fast Rerouting},
booktitle = {Proc. IEEE INFOCOM},
year = {2019},
}
## Overview
* benchmark_graphs: directory to be filled with network topologies used in the experiments
* results: directory to which csv and other output files are written
* arborescences.py: arborescence decomposition and helper algorithms
* routing_stats.py: routing algorithms, simulation and statistics framework
* objective_function_experiments.py: objective functions, independence and SRLG experiments
* srds2019_experiments.py: experiments for the SRDS 2019 paper
* dsn2019_experiments.py: experiments for the DSN 2019 paper
* infocom2019_experiments.py: experiments for the INFOCOM 2019 paper
For some experiments, topologies from [Rocketfuel](https://research.cs.washington.edu/networking/rocketfuel/) and the [Internet Topology Zoo](http://www.topology-zoo.org/) need to be downloaded and copied into the benchmark_graphs directory.
To run the experiments for the SRDS paper, execute the corresponding python file:
```
python srds2019_experiments.py
```
The experiments can be customised with additional arguments (see the main function of the python file), e.g.,
```
python srds2019_experiments.py all 6 1
```
executes one repetition of all SRDS experiments with seed 6. The experiments for the other papers can be run in the same way (see the examples below). In case of questions, please send an email to Yvonne-Anne Pignolet, ya at last name dot ch.
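The DSN and INFOCOM experiment scripts accept the same kind of arguments (experiment selection, seed, number of repetitions; see the main function of each file), for example:
```
python dsn2019_experiments.py all 6 1
python infocom2019_experiments.py all 6 1
```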
For some experiments, topologies from [Rocketfuel](https://research.cs.washington.edu/networking/rocketfuel/) and the [Internet Topology Zoo](http://www.topology-zoo.org/) need to be downloaded and copied into this directory.
import sys
import networkx as nx
import numpy as np
import random
import time
from arborescences import *
import objective_function_experiments as ofe
import glob
# run experiments with AS graphs (pre-generated)
# outX denote file handles to write results to
# seed is used for pseudorandom number generation in this run
# rep denotes the number of repetitions (root nodes tried) per graph
def run_AS(outstretch=None, outtime=None, seed=0, rep=5):
global swappy
astr = ['RR-swap'] # , 'Greedy', 'Random']
algos = {'RR-swap': RR_swap, 'Greedy': Trees, 'Random': RandomTrees}
swappy = [0]
files = glob.glob('./benchmark_graphs/AS*.csv')
for x in files:
print(x)
sys.stdout.flush()
k = int(x[-5:-4])
g = nx.read_edgelist(x).to_directed()
n = len(g.nodes())
print(x, "number of nodes", n)
sys.stdout.flush()
g.graph['k'] = k
nodes = list(g.nodes())
random.shuffle(nodes)
data = {v: {'complete': 0, 'stretch': [0 for i in range(min(rep, n))], 'depth': [0 for i in range(
min(rep, n))], 'time': [0.0 for i in range(min(rep, n))]} for (v, k) in algos.items()}
for count in range(min(rep, n)):
g.graph['root'] = nodes[count]
for a in astr:
reset_arb_attribute(g)
random.seed(seed)
t1 = time.time()
algos[a](g)
t2 = time.time()
s = -1
d = -1
t = t2-t1
if num_complete_nodes(g) == n:
s = stretch(g)
d = depth(g)
data[a]['complete'] += 1
print("success", x, count)
else:
print("fail", "results/dsn-fail_" +
x[-10:-4]+"_"+str(nodes[count])+".png", x, count)
drawArborescences(g, "results/dsn-fail_" +
x[-10:-4]+"_"+str(nodes[count])+".png")
data[a]['stretch'][count] = s
data[a]['depth'][count] = d
data[a]['time'][count] = t
if outstretch != None:
outstretch.write("%s, %d, %d, %s, %d, %d\n" %
(x, n, k, a, count, s))
outstretch.flush()
if outtime != None:
outtime.write("%s, %d, %d, %s, %d, %f\n" %
(x, n, k, a, count, t))
outtime.flush()
sys.stdout.flush()
count = min(rep, n)
print(count, 'repetitions, k', g.graph['k'], 'n', n, x)
print("algo, complete runs, stretch mean, median, max, avg time")
for a in astr:
comp = data[a]['complete']
s = data[a]['stretch'][:count]
d = data[a]['depth'][:count]
t = data[a]['time'][:count]
if comp > 0:
s = [si for si in data[a]['stretch'][:count] if si > -1]
d = [di for di in data[a]['depth'][:count] if di > -1]
print(a + " %.2f ,%.2f (%d), %.2f (%d), %.2f (%.2f) s" % (comp/count,
np.mean(s), np.max(s), np.mean(d), np.max(d), np.mean(t), np.max(t)))
else:
print(a + " %.2f, %.2f (%.2f) s" %
(comp/count, np.mean(t), np.max(t)))
print()
sys.stdout.flush()
# run experiments with regular graphs (pre-generated)
# k is the number of arborescences constructed
# n the number of nodes in the regular graphs
# outX denote file handles to write results to
# seed is used for pseudorandom number generation in this run
# rep denotes the number of random regular graphs generated and evaluated
def run_regular(k=4, n=50, rep=100, outstretch=None, outtime=None, seed=0):
global edge_labels, swappy
edge_labels = {i: {} for i in range(k)}
edge_labels[-1] = {}
astr = ['RR', 'RR-con', 'RR-swap', 'RR-swap-con', 'Greedy', 'random']
astr = ['RR', 'RR-swap', 'Greedy', 'random']
algos = {'Later': BalanceLater, 'RR': RR, 'RR-con': RR_con, 'RR-swap': RR_swap, 'RR-swap-con': RR_con_swap, 'Greedy': Trees,
'Greedy-swap-stretch': OptimizeGreedyStretch, 'Greedy-swap-depth': OptimizeGreedyDepth, 'bestSw': BestSwap, 'random': RandomTrees}
data = {v: {'complete': 0, 'stretch': [0 for i in range(rep)], 'depth': [0 for i in range(
rep)], 'time': [0.0 for i in range(rep)]} for (v, k) in algos.items()}
swappy = [0]
for i in range(rep):
print("run regular, repetition, #complete, swappy",
i, data[astr[0]]['complete'], np.max(swappy))
sys.stdout.flush()
random.seed(i)
g = init_k_graph(k, n)
root = list(g.nodes())[0]
g.graph['root'] = root
g.graph['k'] = k
for a in astr:
reset_arb_attribute(g)
random.seed(i+seed)
t1 = time.time()
algos[a](g)
t2 = time.time()
s = -1
d = -1
t = t2-t1
if num_complete_nodes(g) == n:
s = stretch(g)
d = depth(g)
data[a]['complete'] += 1
else:
drawArborescences(g, "results/dsn-fail_"+str(i)+".png")
print("failed")
data[a]['stretch'][i] = s
data[a]['depth'][i] = d
data[a]['time'][i] = t
comp = data[a]['complete']
if outstretch != None:
outstretch.write("regular, %d, %d, %s, %d, %d\n" %
(n, k, a, i, s))
outstretch.flush()
if outtime != None:
outtime.write("regular, %d, %d, %s, %d, %f\n" %
(n, k, a, i, t))
outtime.flush()
sys.stdout.flush()
print()
print(rep, 'repetitions, k', g.graph['k'], 'n', n, 'random regular graphs')
print("algo, complete runs, stretch mean, median, max, avg time")
    for a in astr:
        comp = data[a]['complete']
        if comp > 0:
            s = [si for si in data[a]['stretch'][:rep] if si > -1]
            d = [di for di in data[a]['depth'][:rep] if di > -1]
            t = data[a]['time'][:rep]
            print(a + " %d, %.2f (%d), %.2f (%d), %.2f (%.2f) s" % (comp,
                  np.mean(s), np.max(s), np.mean(d), np.max(d), np.mean(t), np.max(t)))
# run experiments for dsn 2019 paper
# seed is used for pseudorandom number generation in this run
# switch determines which experiments are run
def dsn_experiments(switch="all", seed=0, short=None):
if switch in ["AS", "all"]:
for i in range(4,8):
ofe.generate_trimmed_AS(i)
if short:
rep = short
else:
rep = 1000
filename = "results/dsn-as_seed_"+str(seed)
outstretch = open(filename+"_stretch.txt", 'a')
outstretch.write(
"#graph, size, connectivity, algorithm, index, stretch\n")
outstretch.write(
"#"+str(time.asctime(time.localtime(time.time())))+"\n")
outtime = open(filename+"_time.txt", 'a')
outtime.write("#graph, size, connectivity, algorithm, index, time\n")
outtime.write("#"+str(time.asctime(time.localtime(time.time())))+"\n")
run_AS(outstretch=outstretch, outtime=outtime, rep=rep, seed=seed)
outstretch.close()
outtime.close()
if short:
rep = short
else:
rep = 200
if switch in ["connectivity", "all"]:
n = 100
for k in [5, 10, 15, 20, 25, 30]: # ,200]:
filename = "results/dsn-regular_nodes_grow_connectivity"+str(k)
outstretch = open(filename+"_stretch.txt", 'a')
outstretch.write(
"#graph, size, connectivity, algorithm, index, stretch\n")
outstretch.write(
"#"+str(time.asctime(time.localtime(time.time())))+"\n")
outtime = open(filename+"_time.txt", 'a')
outtime.write(
"#graph, size, connectivity, algorithm, index, time\n")
outtime.write(
"#"+str(time.asctime(time.localtime(time.time())))+"\n")
run_regular(k=k, n=n, rep=rep, outstretch=outstretch,
outtime=outtime, seed=seed)
outstretch.close()
outtime.close()
if switch in ["size", "all"]:
k = 5
for n in [10, 20, 50, 100, 200, 500, 1000]: # ,200]:
filename = "results/dsn-regular_nodes_grow_size"+str(n)
outstretch = open(filename+"_stretch.txt", 'a')
outstretch.write(
"#graph, size, connectivity, algorithm, index, stretch\n")
outstretch.write(
"#"+str(time.asctime(time.localtime(time.time())))+"\n")
outtime = open(filename+"_time.txt", 'a')
outtime.write(
"#graph, size, connectivity, algorithm, index, time\n")
outtime.write(
"#"+str(time.asctime(time.localtime(time.time())))+"\n")
run_regular(k=k, n=n, rep=rep, outstretch=outstretch,
outtime=outtime, seed=seed)
outstretch.close()
outtime.close()
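# Example invocations (argument order: experiment switch, seed, repetitions; parsed in __main__ below):
#   python dsn2019_experiments.py              # run all experiments with seed 0 and default repetitions
#   python dsn2019_experiments.py AS 6 10      # run only the AS experiments with seed 6 and 10 repetitions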
if __name__ == "__main__":
global rep
start = time.time()
print(time.asctime(time.localtime(start)))
switch = 'all'
seed = 0
short = None
if len(sys.argv) > 1:
switch = sys.argv[1]
if len(sys.argv) > 2:
seed = int(sys.argv[2])
if len(sys.argv) > 3:
short = int(sys.argv[3])
dsn_experiments(switch=switch, seed=seed, short=short)
end = time.time()
print("time elapsed", end-start)
print("start time", time.asctime(time.localtime(start)))
print("end time", time.asctime(time.localtime(end)))
import sys
import networkx as nx
import numpy as np
import itertools
import random
import time
from objective_function_experiments import *
# run experiments for infocom 2019 paper
# seed is used for pseudorandom number generation in this run
# switch determines which experiments are run
def infocom_experiments(switch):
write_graphs()
print('generated graphs')
for (method, name) in [(RandomTrees, 'Random'), (Trees, 'Greedy')]:
if switch in ['subset', 'all']:
experiment_objective_subset(
measure_stretch, method, "stretch_for_subset_"+name)
experiment_objective_subset(
measure_load, method, "load_for_subset_"+name)
if switch in ['independent', 'all']:
print(name, 'independent experiments')
experiment_objective(num_independent_paths_in_arbs,
method, "independent_paths_"+name)
if switch in ['SRLG', 'all']:
print(name, 'SRLG experiments')
experiment_SRLG(method, name)
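# Example invocations (argument order: experiment switch, seed, repetitions; parsed in __main__ below):
#   python infocom2019_experiments.py              # run all experiments with the default parameters below
#   python infocom2019_experiments.py SRLG 2 10    # run only the SRLG experiments with seed 2 and 10 repetitions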
if __name__ == "__main__":
start = time.time()
seed = 1
n = 100
rep = 100
k = 8
f_num = 40
samplesize=20
switch = 'all'
    if len(sys.argv) > 1:
        switch = sys.argv[1]
    if len(sys.argv) > 2:
        seed = int(sys.argv[2])
    if len(sys.argv) > 3:
        rep = int(sys.argv[3])
set_parameters([n, rep, k, samplesize, f_num, seed, "infocom-"])
infocom_experiments(switch)
end = time.time()
print(end-start)
print(time.asctime( time.localtime(start)))
print(time.asctime( time.localtime(end)))
This directory will contain the output files of the experiments.
import sys
import networkx as nx
import numpy as np
import itertools
import random
import time
from networkx.algorithms.connectivity import build_auxiliary_edge_connectivity
from networkx.algorithms.flow import build_residual_network
from arborescences import *
import glob
#global variables in this file
seed = 1
n = 10
rep = 1
k = 8
f_num = 40
samplesize=20
name = "experiment"
#set global variables
def set_params(params):
global seed, n, rep, k, samplesize, name, f_num
[n, rep, k, samplesize, f_num, seed, name] = params
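# Note: several routing functions below read the module-level global k set here
# (e.g. RouteDetBounce, RouteBIBD and RoutePR use it for their switch bounds),
# so set_params should be called before routing.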
# Route according to deterministic circular routing as described by Chiesa et al.
# source s
# destination d
# link failure set fails
# arborescence decomposition T
def RouteDetCirc(s, d, fails, T):
curT = 0
detour_edges = []
hops = 0
switches = 0
n = len(T[0].nodes())
k = len(T)
while (s != d):
nxt = list(T[curT].neighbors(s))
if len(nxt) != 1:
print("Bug: too many or to few neighbours")
nxt = nxt[0]
if (nxt, s) in fails or (s, nxt) in fails:
curT = (curT+1) % k
switches += 1
else:
if switches > 0 and curT > 0:
detour_edges.append((s, nxt))
s = nxt
hops += 1
if hops > n or switches > k*n:
return (True, -1, switches, detour_edges)
return (False, hops, switches, detour_edges)
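# Minimal usage sketch for RouteDetCirc (illustration only, not part of the experiments):
# two edge-disjoint arborescences on K4 rooted at destination 3, with link (0, 3) failed.
#   T0 = nx.DiGraph([(0, 3), (1, 0), (2, 1)])
#   T1 = nx.DiGraph([(1, 3), (0, 2), (2, 3)])
#   RouteDetCirc(0, 3, [(0, 3)], [T0, T1])
#   -> (False, 2, 1, [(0, 2), (2, 3)])   # destination reached in 2 hops after 1 arborescence switch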
#select next arborescence to bounce
def Bounce(s, d, T, cur):
    # return the index of an arborescence that contains the reverse arc (d, s);
    # if there is none, fall back to the next arborescence in round-robin order
    for i in range(len(T)):
        if (d, s) in T[i].edges():
            return i
    return (cur+1) % len(T)
# Route with bouncing as described by Chiesa et al.
# source s
# destination d
# link failure set fails
# arborescence decomposition T
def RouteDetBounce(s, d, fails, T):
detour_edges = []
curT = 0
hops = 0
switches = 0
n = len(T[0].nodes())
while (s != d):
nxt = list(T[curT].neighbors(s))
if len(nxt) != 1:
print("Bug: too many or to few neighbours")
nxt = nxt[0]
if (nxt, s) in fails or (s, nxt) in fails:
if curT == 0:
curT = Bounce(s, nxt, T, curT)
else:
curT = 3 - curT
switches += 1
else:
if switches > 0:
detour_edges.append((s, nxt))
s = nxt
hops += 1
if hops > 3*n or switches > k*n:
print("cycle Bounce")
return (True, hops, switches, detour_edges)
return (False, hops, switches, detour_edges)
# construct a 7x7 BIBD (balanced incomplete block design) matrix of arborescence indices
matrix = []
def PrepareBIBD(connectivity):
    global matrix
    matrix = []
matrix.append([5,0,6,1,2,4,3])
matrix.append([0,1,2,3,4,5,6])
matrix.append([6,2,0,4,1,3,5])
matrix.append([4,3,5,0,6,1,2])
matrix.append([1,4,3,2,5,6,0])
matrix.append([2,5,4,6,3,0,1])
matrix.append([3,6,1,5,0,2,4])
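# RouteBIBD (below) uses this matrix as a per-source try-order over the arborescences:
# the row is chosen from the packet's source (int(source) % (k-1)) and the column advances
# with every failure encountered (switches % k). The matrix is hard-coded for 7 arborescences;
# the connectivity argument of PrepareBIBD is currently ignored.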
# Route with BIBD matrix
# source s
# destination d
# link failure set fails
# arborescence decomposition T
def RouteBIBD(s, d, fails, T):
if len(matrix) == 0:
PrepareBIBD(k)
detour_edges = []
curT = matrix[int(s) % (k-1)][0]
hops = 0
switches = 0
source = s
n = len(T[0].nodes())
while (s != d):
nxt = list(T[curT].neighbors(s))
if len(nxt) != 1:
print("Bug: too many or to few neighbours")
nxt = nxt[0]
if (nxt, s) in fails or (s, nxt) in fails:
switches += 1
# print(switches)
curT = matrix[int(source) % (k-1)][switches % k]
else:
if switches > 0:
detour_edges.append((s, nxt))
s = nxt
hops += 1
if hops > 3*n or switches > k*n:
print("cycle BIBD")
return (True, hops, switches, detour_edges)
return (False, hops, switches, detour_edges)
#build data structure for square one algorithm
SQ1 = {}
def PrepareSQ1(G, d):
global SQ1
H = build_auxiliary_edge_connectivity(G)
R = build_residual_network(H, 'capacity')
SQ1 = {n: {} for n in G}
for u in G.nodes():
if (u != d):
k = sorted(list(nx.edge_disjoint_paths(
G, u, d, auxiliary=H, residual=R)), key=len)
SQ1[u][d] = k
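# Usage sketch (illustration only): PrepareSQ1(G, d) must be called for destination d before
# RouteSQ1(s, d, fails, T) is used; SQ1[u][d] then holds the edge-disjoint u->d paths of G,
# sorted by length. RouteSQ1 reads T only for the node count, so for example:
#   G = nx.complete_graph(4)
#   PrepareSQ1(G, 3)
#   RouteSQ1(0, 3, [(0, 3)], [G])   # reroutes around the failed link (0, 3) via a detour path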
# Route with the square one (SQ1) scheme: when a failure is hit, return to the source
# and switch to the next precomputed edge-disjoint path (see PrepareSQ1)
# source s
# destination d
# link failure set fails
# arborescence decomposition T
def RouteSQ1(s, d, fails, T):
curRoute = SQ1[s][d][0]
k = len(SQ1[s][d])
detour_edges = []
index = 1
hops = 0
switches = 0
c = s # current node
n = len(T[0].nodes())
while (c != d):
nxt = curRoute[index]
if (nxt, c) in fails or (c, nxt) in fails:
for i in range(2, index+1):
detour_edges.append((c, curRoute[index-i]))
c = curRoute[index-i]
switches += 1
c = s
hops += (index-1)
curRoute = SQ1[s][d][switches % k]
index = 1
else:
if switches > 0:
detour_edges.append((c, nxt))
c = nxt
index += 1
hops += 1
if hops > 3*n or switches > k*n:
print("cycle square one")
return (True, hops, switches, detour_edges)
return (False, hops, switches, detour_edges)
# Route with randomization as described by Chiesa et al.
# source s
# destination d
# link failure set fails
# arborescence decomposition T
P = 0.5358 # bounce probability
def RoutePR(s, d, fails, T):
detour_edges = []
curT = 0
hops = 0
switches = 0
n = len(T[0].nodes())
while (s != d):
nxt = list(T[curT].neighbors(s))
if len(nxt) != 1:
print("Bug: too many or to few neighbours")
nxt = nxt[0]
if (nxt, s) in fails or (s, nxt) in fails:
x = random.random()
if x <= P:
curT = Bounce(s, nxt, T, curT)
else:
newT = random.randint(0, len(T)-2)
if newT >= curT:
newT = (newT+1) % len(T)
curT = newT
switches += 1
else:
if switches > 0:
detour_edges.append((s, nxt))
s = nxt
hops += 1
if hops > 3*n or switches > k*n:
print("cycle PR")
return (True, hops, switches, detour_edges)
return (False, hops, switches, detour_edges)
# Route randomly without bouncing as described by Chiesa et al.
# source s
# destination d
# link failure set fails
# arborescence decomposition T
def RoutePRNB(s, d, fails, T):
detour_edges = []
curT = 0
hops = 0
switches = 0
n = len(T[0].nodes())
while (s != d):
nxt = list(T[curT].neighbors(s))
if len(nxt) != 1:
print("Bug: too many or to few neighbours")
nxt = nxt[0]
if (nxt, s) in fails or (s, nxt) in fails:
newT = random.randint(0, len(T)-2)