Initial commit
commit fd14433603

.screens/preview.png (new binary file, 61 KiB; content not shown)
README.md (new file)
# BGP Stream

> [www](https://bgpstream.caida.org/) | [github](https://github.com/caida/libbgpstream) | [bgpstream-info@caida.org](mailto:bgpstream-info@caida.org)

![](.screens/preview.png)

### Overview

- [BGP Stream: A framework for BGP analysis](https://ripe70.ripe.net/presentations/55-bgpstream.pdf) *(pdf)*

# Install BGP Stream

You can visit the official [install page](https://bgpstream.caida.org/docs/install) to see if there is a different approach you want to take. There is also a [docker](https://hub.docker.com/r/caida/bgpstream) image.

The following outlines compiling it from source, for consistency across distros.

###### Requirements

- [libcurl](https://curl.se/libcurl/)
- [wandio 4.2.4-1](https://github.com/LibtraceTeam/wandio/releases/tag/4.2.4-1) *(wandio 4.2.5 has since been released, but it has not been tested against bgpstream here)*

###### Compiling wandio

1. Install the required packages: `build-essential curl zlib1g-dev libbz2-dev libcurl4-openssl-dev librdkafka-dev automake1.11 libtool`
2. Grab the source: `curl -LO https://github.com/LibtraceTeam/wandio/archive/refs/tags/4.2.4-1.tar.gz`
3. Extract the archive, then compile with `./configure && make && sudo make install`
4. Lastly, run `sudo ldconfig`

###### Compiling libbgpstream

1. Install the required packages: `sudo apt-get install -y curl apt-transport-https ssl-cert ca-certificates gnupg lsb-release`
2. Grab the source: `curl -LO https://github.com/CAIDA/libbgpstream/releases/download/v2.2.0/libbgpstream-2.2.0.tar.gz`
3. Extract the archive, then compile with `./configure && make && sudo make install`
4. Lastly, run `sudo ldconfig`

This will create `/usr/local/bin/bgpreader` *([documentation](https://bgpstream.caida.org/docs/tools/bgpreader))*

Lastly, for Python support, `pip install pybgpstream` *([documentation](https://bgpstream.caida.org/docs/api/pybgpstream))* *([pypi](https://pypi.org/project/pybgpstream/))*
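
Once installed, a quick way to sanity-check the Python bindings is a minimal pybgpstream script along the lines of the bundled examples (the collector and time window below are arbitrary example values):

```python
#!/usr/bin/env python3
# Minimal pybgpstream sanity check: pull a short window of historical updates
# from RIPE RIS collector rrc00 and print the first few elements.
# (Collector and time range are arbitrary example values.)
import pybgpstream

stream = pybgpstream.BGPStream(
    from_time="2015-08-01 07:50:00", until_time="2015-08-01 08:00:00",
    collectors=["rrc00"],
    record_type="updates",
)

for i, elem in enumerate(stream):
    print(elem)
    if i >= 9:  # stop after ten elements
        break
```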

**NOTE:** `pybgpstream` can also be installed as a system package with `sudo apt-get install python3-pybgpstream`.

**NOTE:** The [Broker HTTP API](https://bgpstream.caida.org/docs/api/broker) may also come in handy.
examples/asnpaths.py (new file)

#!/usr/bin/env python3
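
# Example usage (3333 is a placeholder target ASN):
#   ./asnpaths.py 3333 -d 100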

# Import pybgpstream and other necessary libraries
from pybgpstream import BGPStream
from ipaddress import ip_network
import time
import sys
import argparse

parser = argparse.ArgumentParser()
parser.add_argument("target", nargs="*", type=str, help="ASNs we are looking up")
parser.add_argument("-d", "--debug", type=int, help="Number of traces")
args = parser.parse_args()

# Initialize BGPStream with RIPE RIS Live and collector rrc00
stream = BGPStream(project="ris-live",
                   collectors=["rrc00"],
                   filter="collector rrc00")

# The stream will not load new data until it is done with the data already pulled.
stream.set_live_mode()
print("starting stream...", file=sys.stderr)

# Counter
counter = 0

for record in stream.records():
    # Handle the debug option (stop after N records)
    if args.debug is None:
        pass
    elif counter >= args.debug:
        break
    else:
        counter += 1

    rec_time = time.strftime('%y-%m-%d %H:%M:%S', time.localtime(record.time))
    for elem in record:
        try:
            prefix = ip_network(elem.fields['prefix'])
            # Only print elements that are announcements (BGPElem.type = "A")
            # or RIB entries (BGPElem.type = "R")
            if elem.type == "A" or elem.type == "R":
                as_path = elem.fields['as-path'].split(" ")
                # Print elements whose AS path contains one of the target ASNs
                for target in args.target:
                    if target in as_path:
                        print(f"Peer asn: {elem.peer_asn} AS Path: {as_path} "
                              f"Communities: {elem.fields['communities']} "
                              f"Timestamp: {rec_time}")
                        break

        # Report and skip any KeyError (element missing an expected field)
        except KeyError as e:
            print("KEY ERROR, element ignored: KEY=" + str(e), file=sys.stderr)
            continue
examples/build-cone.py (new file)

#!/usr/bin/env python3
__author__ = "Bradley Huffaker"
__email__ = "<bradley@caida.org>"
# This software is Copyright (C) 2022 The Regents of the University of
# California. All Rights Reserved. Permission to copy, modify, and
# distribute this software and its documentation for educational, research
# and non-profit purposes, without fee, and without a written agreement is
# hereby granted, provided that the above copyright notice, this paragraph
# and the following three paragraphs appear in all copies. Permission to
# make commercial use of this software may be obtained by contacting:
#
# Office of Innovation and Commercialization
# 9500 Gilman Drive, Mail Code 0910
# University of California
# La Jolla, CA 92093-0910
# (858) 534-5815
#
# invent@ucsd.edu
#
# This software program and documentation are copyrighted by The Regents of
# the University of California. The software program and documentation are
# supplied “as is”, without any accompanying services from The Regents. The
# Regents does not warrant that the operation of the program will be
# uninterrupted or error-free. The end-user understands that the program
# was developed for research purposes and is advised not to rely
# exclusively on the program for any reason.
#
# IN NO EVENT SHALL THE UNIVERSITY OF CALIFORNIA BE LIABLE TO ANY PARTY FOR
# DIRECT, INDIRECT, SPECIAL, INCIDENTAL, OR CONSEQUENTIAL DAMAGES,
# INCLUDING LOST PROFITS, ARISING OUT OF THE USE OF THIS SOFTWARE AND ITS
# DOCUMENTATION, EVEN IF THE UNIVERSITY OF CALIFORNIA HAS BEEN ADVISED OF
# THE POSSIBILITY OF SUCH DAMAGE. THE UNIVERSITY OF CALIFORNIA SPECIFICALLY
# DISCLAIMS ANY WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE. THE
# SOFTWARE PROVIDED HEREUNDER IS ON AN “AS IS” BASIS, AND THE UNIVERSITY OF
# CALIFORNIA HAS NO OBLIGATIONS TO PROVIDE MAINTENANCE, SUPPORT, UPDATES,
# ENHANCEMENTS, OR MODIFICATIONS.
#

# Import pybgpstream and other necessary libraries
from pybgpstream import BGPStream
from datetime import date
from datetime import timedelta
import argparse

import sys
import bz2

parser = argparse.ArgumentParser()
parser.add_argument("link_file", nargs=1, type=str)
parser.add_argument("-d", "--debug", type=int, default=-1)
args = parser.parse_args()
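
# Note (assumption based on download_links() below): link_file is a bzip2-compressed
# AS-relationship file with pipe-separated lines, i.e. "<provider-asn>|<customer-asn>|-1"
# for provider-to-customer links and "<peer-asn>|<peer-asn>|0" for peer links
# (the CAIDA serial-1 as-rel format).
# Example (hypothetical filename): ./build-cone.py 20220101.as-rel.txt.bz2 -d 1000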

def main():
    sys.stderr.write("This is going to take some time\n")

    # Handle debug option
    debug_count = args.debug

    peer_provider = download_links(args.link_file[0])
    asn__cone = download_paths(peer_provider, debug_count)

    print("# ASN followed by the ASNs in its customer cone")
    print("# '1 23 4' means ASN 1's customer cone includes ASN 23 and ASN 4")
    for asn, cone in sorted(asn__cone.items(), key=lambda a_c: len(a_c[1]), reverse=True):
        print(asn + " " + " ".join(sorted(cone, key=lambda a: int(a.strip("{}").split(",")[0]))))


# Find the set of AS relationships that are
# peer-to-peer or provider-to-customer.
def download_links(filename):
    sys.stderr.write("loading relationships\n")

    peer_provider = set()
    with bz2.open(filename, mode='rt') as fin:
        for line in fin:
            # skip comments
            if len(line) == 0 or line[0] == "#":
                continue
            asn0, asn1, rel = line.rstrip().split("|")

            # peers work in both directions
            if rel == "0":
                peer_provider.add(asn1 + " " + asn0)
                peer_provider.add(asn0 + " " + asn1)

            # store the link from provider to customer
            # (as-rel lines are <provider>|<customer>|-1)
            elif rel == "-1":
                peer_provider.add(asn0 + " " + asn1)
            else:
                peer_provider.add(asn1 + " " + asn0)

    return peer_provider


# Download the AS paths from BGPStream,
# crop each path to the section after the
# first peer or provider link, then add
# all the remaining ASes to the preceding
# ASes in the cropped path.
def download_paths(peer_provider, debug_count):
    # The set of ASes reachable through an AS's customers
    asn__cone = {}

    sys.stderr.write("downloading paths\n")

    # Request RIBs covering yesterday's first second
    from_time = date.today() - timedelta(days=1)
    until_time = from_time + timedelta(seconds=1)
    stream = BGPStream(
        from_time=from_time.strftime("%Y-%m-%d %H:%M:%S"), until_time=until_time.strftime("%Y-%m-%d %H:%M:%S"),
        record_type="ribs"
    )
    stream.add_rib_period_filter(86400)  # This should limit BGPStream to downloading only the first full BGP dump

    # counter
    count = 0
    for elem in stream:
        # Break when debug mode is active and count reaches the debug count
        if (debug_count >= 0) and (count >= debug_count):
            break

        asns = elem.fields['as-path'].split(" ")

        # Skip until the first peer-peer or provider->customer link
        i = 0
        while i + 1 < len(asns):
            link = asns[i] + " " + asns[i+1]
            i += 1
            if link in peer_provider:
                break

        # Since an AS only announces its customer cone to its peers and providers,
        # the remaining ASes in the path are in the preceding ASes' customer cones.
        while i + 1 < len(asns):
            if asns[i] not in asn__cone:
                asn__cone[asns[i]] = set()
            cone = asn__cone[asns[i]]
            j = i + 1
            while j < len(asns):
                # print(">", i, j, asns[i], asns[j])  # debug output
                cone.add(asns[j])
                j += 1
            i += 1

        # Increment count
        count += 1

    return asn__cone


# Run the main method
main()
examples/download_asn_paths.py (new file)

#!/usr/bin/env python
__author__ = "Pooja Pathak"
__email__ = "<pmpathak@ucsd.edu>"
# This software is Copyright © 2020 The Regents of the University of
# California. All Rights Reserved. Permission to copy, modify, and
# distribute this software and its documentation for educational, research
# and non-profit purposes, without fee, and without a written agreement is
# hereby granted, provided that the above copyright notice, this paragraph
# and the following three paragraphs appear in all copies. Permission to
# make commercial use of this software may be obtained by contacting:
#
# Office of Innovation and Commercialization
#
# 9500 Gilman Drive, Mail Code 0910
#
# University of California
#
# La Jolla, CA 92093-0910
#
# (858) 534-5815
#
# invent@ucsd.edu
#
# This software program and documentation are copyrighted by The Regents of
# the University of California. The software program and documentation are
# supplied “as is”, without any accompanying services from The Regents. The
# Regents does not warrant that the operation of the program will be
# uninterrupted or error-free. The end-user understands that the program
# was developed for research purposes and is advised not to rely
# exclusively on the program for any reason.
#
# IN NO EVENT SHALL THE UNIVERSITY OF CALIFORNIA BE LIABLE TO ANY PARTY FOR
# DIRECT, INDIRECT, SPECIAL, INCIDENTAL, OR CONSEQUENTIAL DAMAGES,
# INCLUDING LOST PROFITS, ARISING OUT OF THE USE OF THIS SOFTWARE AND ITS
# DOCUMENTATION, EVEN IF THE UNIVERSITY OF CALIFORNIA HAS BEEN ADVISED OF
# THE POSSIBILITY OF SUCH DAMAGE. THE UNIVERSITY OF CALIFORNIA SPECIFICALLY
# DISCLAIMS ANY WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE. THE
# SOFTWARE PROVIDED HEREUNDER IS ON AN “AS IS” BASIS, AND THE UNIVERSITY OF
# CALIFORNIA HAS NO OBLIGATIONS TO PROVIDE MAINTENANCE, SUPPORT, UPDATES,
# ENHANCEMENTS, OR MODIFICATIONS.

import pybgpstream
import os.path

# Create pybgpstream
stream = pybgpstream.BGPStream(
    from_time="2017-07-07 00:00:00", until_time="2017-07-07 00:10:00 UTC",
    collectors=["route-views.sg", "route-views.eqix"],
    record_type="updates",
)

prefix_asn = dict()
for elem in stream:
    # record fields can be accessed directly from elem
    if "as-path" not in elem.fields or "prefix" not in elem.fields:
        continue
    asns = elem.fields["as-path"].rstrip().split(" ")
    prefix = elem.fields["prefix"]

    if len(asns) < 1:
        continue

    # Get the origin AS
    asn = asns[-1]

    # Drop origin AS sets
    if len(asn.split(",")) > 1:
        continue

    if asn[0] == '{':
        continue

    # Populate prefix_asn with the prefix-to-ASN mapping
    if prefix not in prefix_asn:
        prefix_asn[prefix] = set()
    prefix_asn[prefix].add(asn)

# Write the prefix-ASN mapping to prefix2asn.dat
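# Output format: one "<prefix>\t<origin ASN>" line per prefix that has a single
# origin; examples/ip_asn.py loads a file like this with pyasn via its -p option.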
fout = open('prefix2asn.dat', "w")
for prefix, asns in prefix_asn.items():
    if len(asns) == 1:
        fout.write(prefix)
        fout.write("\t")
        fout.write("".join(prefix_asn[prefix]))
        fout.write("\n")

fout.close()
examples/ip_asn.py (new file)

#!/usr/bin/env python
__author__ = "Pooja Pathak"
__email__ = "<pmpathak@ucsd.edu>"
# This software is Copyright © 2020 The Regents of the University of
# California. All Rights Reserved. Permission to copy, modify, and
# distribute this software and its documentation for educational, research
# and non-profit purposes, without fee, and without a written agreement is
# hereby granted, provided that the above copyright notice, this paragraph
# and the following three paragraphs appear in all copies. Permission to
# make commercial use of this software may be obtained by contacting:
#
# Office of Innovation and Commercialization
#
# 9500 Gilman Drive, Mail Code 0910
#
# University of California
#
# La Jolla, CA 92093-0910
#
# (858) 534-5815
#
# invent@ucsd.edu
#
# This software program and documentation are copyrighted by The Regents of
# the University of California. The software program and documentation are
# supplied “as is”, without any accompanying services from The Regents. The
# Regents does not warrant that the operation of the program will be
# uninterrupted or error-free. The end-user understands that the program
# was developed for research purposes and is advised not to rely
# exclusively on the program for any reason.
#
# IN NO EVENT SHALL THE UNIVERSITY OF CALIFORNIA BE LIABLE TO ANY PARTY FOR
# DIRECT, INDIRECT, SPECIAL, INCIDENTAL, OR CONSEQUENTIAL DAMAGES,
# INCLUDING LOST PROFITS, ARISING OUT OF THE USE OF THIS SOFTWARE AND ITS
# DOCUMENTATION, EVEN IF THE UNIVERSITY OF CALIFORNIA HAS BEEN ADVISED OF
# THE POSSIBILITY OF SUCH DAMAGE. THE UNIVERSITY OF CALIFORNIA SPECIFICALLY
# DISCLAIMS ANY WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE. THE
# SOFTWARE PROVIDED HEREUNDER IS ON AN “AS IS” BASIS, AND THE UNIVERSITY OF
# CALIFORNIA HAS NO OBLIGATIONS TO PROVIDE MAINTENANCE, SUPPORT, UPDATES,
# ENHANCEMENTS, OR MODIFICATIONS.

import pyasn
import argparse
import datetime
import resource
import os
import psutil

def returnTime():
    return datetime.datetime.now()

def returnMemUsage():
    process = psutil.Process(os.getpid())
    return process.memory_info()[0]


parser = argparse.ArgumentParser()
parser.add_argument('-p', dest='prefix2asn_file', default='', help='Please enter the prefix2asn file name')
parser.add_argument('-i', dest='ips_file', default='', help='Please enter the file name of the ips file')
args = parser.parse_args()
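
# Example usage (example file names):
#   ./ip_asn.py -p prefix2asn.dat -i ips.txt
# prefix2asn.dat can be produced by examples/download_asn_paths.py; the ips file
# is expected to be tab-separated with an IP address in the second column.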

# Get the list of IPs
ips = []
with open(args.ips_file) as f:
    for line in f:
        line = line.rstrip().split("\t")[1]
        ips.append(line)


asndb = pyasn.pyasn(args.prefix2asn_file)

begin_time = returnTime()
begin_mem = returnMemUsage()

# Create the ip2asn mapping
ip2asn = {}
for ip in ips:
    result = asndb.lookup(ip)
    if result:
        asn, prefix = result
        if asn:
            ip2asn[ip] = asn

# print(ip2asn)
end_time = returnTime()
end_mem = returnMemUsage()

# hour:minute:second:microsecond
print("Delta time:", end_time - begin_time)
print("Delta memory use:", end_mem - begin_mem)
examples/ip_asn_pyipmeta.py (new file)

#!/usr/bin/env python
__author__ = "Pooja Pathak"
__email__ = "<pmpathak@ucsd.edu>"
# This software is Copyright © 2020 The Regents of the University of
# California. All Rights Reserved. Permission to copy, modify, and
# distribute this software and its documentation for educational, research
# and non-profit purposes, without fee, and without a written agreement is
# hereby granted, provided that the above copyright notice, this paragraph
# and the following three paragraphs appear in all copies. Permission to
# make commercial use of this software may be obtained by contacting:
#
# Office of Innovation and Commercialization
#
# 9500 Gilman Drive, Mail Code 0910
#
# University of California
#
# La Jolla, CA 92093-0910
#
# (858) 534-5815
#
# invent@ucsd.edu
#
# This software program and documentation are copyrighted by The Regents of
# the University of California. The software program and documentation are
# supplied “as is”, without any accompanying services from The Regents. The
# Regents does not warrant that the operation of the program will be
# uninterrupted or error-free. The end-user understands that the program
# was developed for research purposes and is advised not to rely
# exclusively on the program for any reason.
#
# IN NO EVENT SHALL THE UNIVERSITY OF CALIFORNIA BE LIABLE TO ANY PARTY FOR
# DIRECT, INDIRECT, SPECIAL, INCIDENTAL, OR CONSEQUENTIAL DAMAGES,
# INCLUDING LOST PROFITS, ARISING OUT OF THE USE OF THIS SOFTWARE AND ITS
# DOCUMENTATION, EVEN IF THE UNIVERSITY OF CALIFORNIA HAS BEEN ADVISED OF
# THE POSSIBILITY OF SUCH DAMAGE. THE UNIVERSITY OF CALIFORNIA SPECIFICALLY
# DISCLAIMS ANY WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE. THE
# SOFTWARE PROVIDED HEREUNDER IS ON AN “AS IS” BASIS, AND THE UNIVERSITY OF
# CALIFORNIA HAS NO OBLIGATIONS TO PROVIDE MAINTENANCE, SUPPORT, UPDATES,
# ENHANCEMENTS, OR MODIFICATIONS.

import _pyipmeta
import datetime
import os
import psutil

def returnTime():
    return datetime.datetime.now()

def returnMemUsage():
    process = psutil.Process(os.getpid())
    return process.memory_info()[0]


ipm = _pyipmeta.IpMeta()
# print(ipm)

# Get/enable the pfx2as provider (using included test data)
prov = ipm.get_provider_by_name("pfx2as")
# print(prov)
print(ipm.enable_provider(prov, "-f /test/pfx2as/routeviews-rv2-20170329-0200.pfx2as.gz"))
print()


ips = []
with open('ips.txt') as f:
    for line in f:
        line = line.rstrip().split("\t")[1]
        ips.append(line)

begin_time = returnTime()
begin_mem = returnMemUsage()

ip2asn = {}
for ip in ips:
    if ipm.lookup(ip):
        (res,) = ipm.lookup(ip)
        if res.get('asns'):
            ip2asn[ip] = res.get('asns')


# print(ip2asn)
end_time = returnTime()
end_mem = returnMemUsage()

# hour:minute:second:microsecond
print("Delta time:", end_time - begin_time)
print("Delta memory:", end_mem - begin_mem)
examples/pybgpstream-aspath.py (new file)

import pybgpstream
import networkx as nx
from collections import defaultdict
from itertools import groupby

# Create an instance of a simple undirected graph
as_graph = nx.Graph()

bgp_lens = defaultdict(lambda: defaultdict(lambda: None))

stream = pybgpstream.BGPStream(
    # Consider this time interval:
    # Sat, 01 Aug 2015 7:50:00 GMT - 08:10:00 GMT
    from_time="2015-08-01 07:50:00", until_time="2015-08-01 08:10:00",
    collectors=["rrc00"],
    record_type="ribs",
)

for rec in stream.records():
    for elem in rec:
        # Get the peer ASn
        peer = str(elem.peer_asn)
        # Get the array of ASns in the AS path and remove repeatedly prepended ASns
        hops = [k for k, g in groupby(elem.fields['as-path'].split(" "))]
        if len(hops) > 1 and hops[0] == peer:
            # Get the origin ASn
            origin = hops[-1]
            # Add new edges to the NetworkX graph
            for i in range(0, len(hops) - 1):
                as_graph.add_edge(hops[i], hops[i+1])
            # Update the AS path length between 'peer' and 'origin'
            bgp_lens[peer][origin] = \
                min(list(filter(bool, [bgp_lens[peer][origin], len(hops)])))

# For each 'peer' and 'origin' pair
for peer in bgp_lens:
    for origin in bgp_lens[peer]:
        # compute the shortest path in the NetworkX graph
        nxlen = len(nx.shortest_path(as_graph, peer, origin))
        # and compare it to the BGP hop length
        print((peer, origin, bgp_lens[peer][origin], nxlen))
examples/pybgpstream-communities.py (new file)

#!/usr/bin/env python

import pybgpstream
from collections import defaultdict

stream = pybgpstream.BGPStream(
    # Consider this time interval:
    # Sat, 01 Aug 2015 7:50:00 GMT - 08:10:00 GMT
    from_time="2015-08-01 07:50:00", until_time="2015-08-01 08:10:00",
    collectors=["rrc06"],
    record_type="ribs",
    filter="peer 25152 and prefix more 185.84.166.0/23 and community *:3400"
)

# <community, prefix-set> dictionary
community_prefix = defaultdict(set)

# Get next record
for rec in stream.records():
    for elem in rec:
        # Get the prefix
        pfx = elem.fields['prefix']
        # Get the associated communities
        communities = elem.fields['communities']
        # for each community, save the set of prefixes
        # that are affected
        for c in communities:
            community_prefix[c].add(pfx)

# Print each community and the prefixes it affects
for ct in community_prefix:
    print("Community:", ct, "==>", ",".join(community_prefix[ct]))
examples/pybgpstream-moas.py (new file)

#!/usr/bin/env python

from collections import defaultdict
import pybgpstream

stream = pybgpstream.BGPStream(
    # Consider this time interval:
    # Sat, 01 Aug 2015 7:50:00 GMT - 08:10:00 GMT
    from_time="2015-08-01 07:50:00", until_time="2015-08-01 08:10:00",
    collectors=["rrc00"],
    record_type="ribs",
)

# <prefix, origin-ASns-set> dictionary
prefix_origin = defaultdict(set)

for rec in stream.records():
    for elem in rec:
        # Get the prefix
        pfx = elem.fields["prefix"]
        # Get the list of ASes in the AS path
        ases = elem.fields["as-path"].split(" ")
        if len(ases) > 0:
            # Get the origin ASn (rightmost)
            origin = ases[-1]
            # Insert the origin ASn in the set of
            # origins for the prefix
            prefix_origin[pfx].add(origin)

# Print the list of MOAS prefixes and their origin ASns
for pfx in prefix_origin:
    if len(prefix_origin[pfx]) > 1:
        print((pfx, ",".join(prefix_origin[pfx])))
examples/pybgpstream-ris-live.py (new file)

#!/usr/bin/env python

import pybgpstream
stream = pybgpstream.BGPStream(
    # accessing ris-live
    project="ris-live",
    # filter to show only the stream from rrc00
    filter="collector rrc00",
)

for elem in stream:
    print(type(elem))
    print(elem)
examples/pybgpstream-routeviews-stream.py (new file)

#!/usr/bin/env python

import pybgpstream
stream = pybgpstream.BGPStream(
    # accessing routeviews-stream
    project="routeviews-stream",
    # filter to show only the amsix bmp stream
    filter="router amsix",
)

for elem in stream:
    print(elem)
examples/records.py (new file)

#!/usr/bin/env python3

# Import pybgpstream and other necessary libraries
from pybgpstream import BGPStream
import time
import argparse

parser = argparse.ArgumentParser()
parser.add_argument("print_amount", nargs=1, type=int, help="Number of prints")
args = parser.parse_args()
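
# Example usage (print the elements of the first 10 records):
#   ./records.py 10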

# Initialize BGPStream, with data from routeviews-stream for router amsix.
stream = BGPStream(project='routeviews-stream', filter="router amsix")

# Counter to stop BGPStream after X amount of prints.
counter = 0

# Print records yielded from stream.records() in a bgpreader-like format.
for record in stream.records():
    # Only print the first X records found.
    if counter >= args.print_amount[0]:
        break
    else:
        counter += 1

    print(record.project, record.collector, record.router)
    # Make the date human readable
    rec_time = time.strftime('%y-%m-%d %H:%M:%S', time.localtime(record.time))
    for elem in record:
        # Print the current element in the record. Both forms are equivalent.
        # print(elem)
        print("{}|{}|{}|{}|{}|{}|{}|{}|{}|{}|{}|{}|{}|{}|{}".format(
            elem.record_type,
            elem.type,
            rec_time,
            elem.project,
            elem.collector,
            elem.router,
            elem.router_ip,
            elem.peer_asn,
            elem.peer_address,
            elem._maybe_field("prefix"),
            elem._maybe_field("next-hop"),
            elem._maybe_field("as-path"),
            " ".join(elem.fields["communities"]) if "communities" in elem.fields else None,
            elem._maybe_field("old-state"),
            elem._maybe_field("new-state")
        ))
examples/rpki.py (new file)

#!/usr/bin/env python3

from pybgpstream import BGPStream
from ipaddress import ip_network
import requests
import sys
import json
import argparse

# Initialize BGPStream, with routeviews-stream project, filtering for amsix.
stream = BGPStream(project="routeviews-stream", filter="router amsix")
print("starting stream...", file=sys.stderr)

# Debug option to limit the number of traces
parser = argparse.ArgumentParser()
parser.add_argument("-d", "--debug", type=int, help="Number of traces")
args = parser.parse_args()

# Counter
counter = 0
for record in stream.records():
    # Handle the debug option
    if args.debug is None:
        pass
    elif counter >= args.debug:
        break
    else:
        counter += 1

    for elem in record:
        if elem.type == "A":
            prefix = ip_network(elem.fields['prefix'])
            # Look up the RPKI state based on the announced route.
            request = requests.get(f"https://api.routeviews.org/rpki?prefix={prefix}", verify=False)
            response = request.json()
            # Skip all None responses
            if response[str(prefix)] is not None:
                data = {
                    "prefix": str(prefix),
                    "rpki": response[str(prefix)],
                    "timestamp": response[str(prefix)]['timestamp']
                }
                # Output json to stdout
                print(json.dumps(data))