Public
Edited
Jul 25, 2024
Importers
Insert cell
Insert cell
Insert cell
Insert cell
// UNIX timestamp (seconds) of the start of the selected day (from the date_search cell)
start_time = Math.floor(new Date( date_search ).getTime() / 1000)

Insert cell
// end of the query window: 24 hours (in seconds) after start_time
end_time = start_time + (24*3600)
Insert cell
Insert cell
bgp_raw = {
const url=`https://stat.ripe.net/data/bgplay/data.json?resource=${pfx_search}&rrcs=0%2C1%2C3%2C5%2C6%2C7%2C10%2C11%2C13%2C14%2C15%2C16%2C18%2C20%2C25&starttime=${ start_time }&endtime=${ end_time }&unix_timestamps=TRUE`
//console.log( url );
const data = fetch( url ).then((response) => response.json());
return data
}
Insert cell
function calculateHegemonyFromBGPlayJSON( bgplay_json ) {
// Computes AS hegemony scores from a RIPEstat BGPlay JSON object.
// input:  BGPlay json object (uses data.initial_state and data.events)
// output: hash of <asn> : <hegemony> values, derived from the initial RIB
//         state only.
// TODO: fold the announce/withdraw events into the scores so changes over
//       time are reflected (the event loop below is still a stub).
const alpha=0.05; // defines how much of outliers are being discarded. 0.05 -> 5% at each side of distribution
const peer_set = new Set(); // all peers seen in the initial state
const per_asn_cnt = {}; // holds how many peers see an ASN
const peer2asnset = {} // holds the asnset for each peer (to figure out when to recalculate hege scores)

// calculate initial state
for (const init_state of bgplay_json.data.initial_state) {
peer_set.add( init_state.source_id )
const asns = new Set( init_state.path ) // Set removes AS-path-prepending duplicates
peer2asnset[ init_state.source_id ] = asns
for (const asn of asns) {
if(!( asn in per_asn_cnt )) { // initialise hash
per_asn_cnt[ asn ] = 0
}
per_asn_cnt[ asn ] += 1
}
}
const hegeScores = _calcheges( per_asn_cnt, peer_set.size , alpha )

// Event processing is not implemented yet: announcements ('A') and
// withdrawals ('W') are recognised but do not change the scores.
// (A previous version logged every announcement once per AS-path hop,
// flooding the console without affecting the result.)
for (const evt of bgplay_json.data.events) {
if ( evt['type'] == 'A' ) {
// TODO: replace this peer's entry in peer2asnset, adjust per_asn_cnt
//       for the removed/added ASNs and recompute when the set changed.
} else if ( evt['type'] == 'W' ) {
// TODO: remove this peer's contribution from per_asn_cnt and recompute.
}
}
return hegeScores
}
Insert cell
function _calcheges( per_asn_cnt, peer_cnt, alpha ) {
// Calculates AS hegemony scores from an AS count hash.
//
// per_asn_cnt : hash of <asn> : <number of peers that see this ASN on-path>
// peer_cnt    : total number of peers
// alpha       : fraction of outlier peers discarded at each side of the
//               distribution (e.g. 0.05 -> 5% trimmed on each end)
//
// Returns a hash of <asn> : <hegemony score>, containing only ASNs whose
// score is > 0.
const hege = {};
const peer_corr = alpha * peer_cnt; // number of peers ignored per side due to the alpha parameter
for (const asn in per_asn_cnt) {
const raw_bc = per_asn_cnt[ asn ] / peer_cnt // raw betweenness centrality
let asn_hege
if ( raw_bc < alpha ) {
asn_hege = 0 // below the trimmed range: not significant
} else if ( raw_bc > 1 - alpha ) {
asn_hege = 1 // above the trimmed range: fully central
} else {
// rescale the trimmed range [alpha, 1-alpha] onto [0, 1]
asn_hege = ( per_asn_cnt[ asn ] - peer_corr ) / ( peer_cnt - 2*peer_corr )
}
if ( asn_hege > 0 ) {
hege[ asn ] = asn_hege
}
}
return hege;
}
Insert cell
// AS hegemony scores per ASN; Observable implicitly awaits the bgp_raw
// promise, so the function receives the resolved JSON object.
hege = calculateHegemonyFromBGPlayJSON( bgp_raw );

Insert cell
// Parses the raw BGPlay JSON into plottable time series:
//   initial_peers : number of peers in the initial RIB state
//   update_msg    : one record per BGP update message (timestamp only)
//   visibility    : timeline of how many peers see the queried prefix at all
//   po_vis        : per "<prefix> <origin ASN>" timeline of peer visibility
bgp_parsed = { // need to split fetch and parse (ie. bgp_raw and bgp_parsed cells) because of Observable's reactive await handling: the fetch must resolve before this cell runs
const bgp = {'initial_peers': 0, 'update_msg': [], 'visibility': [], 'po_vis': {}};
const peer_state = new Set(); // peers currently announcing the prefix // TODO remove
const po_state = new Object; // contains prefix/origin state: "<prefix> <origin>" -> Set of announcing peers
// Seed the state from the initial RIB dump.
for (const idx in bgp_raw.data.initial_state) {
const init_state = bgp_raw.data.initial_state[ idx ];
peer_state.add( init_state.source_id );
// key: "<prefix> <origin ASN>"; origin = last hop of the AS path
var po = init_state.target_prefix + " " + init_state.path[ init_state.path.length - 1 ];
if (! po_state.hasOwnProperty( po ) ) {
po_state[ po ] = new Set();
}
po_state[ po ].add( init_state.source_id );
bgp.initial_peers += 1;
}
// Anchor all timelines at the start of the query window.
bgp.visibility.push({'ts': bgp_raw.data.query_starttime, 'cnt': peer_state.size});
for ( var po in po_state ) {
bgp.po_vis[ po ] = [];
bgp.po_vis[ po ].push({'ts': bgp_raw.data.query_starttime, 'cnt': po_state[po].size})
}
// Replay the update messages in order, mutating peer_state/po_state.
for (var idx in bgp_raw.data.events) {
const msg = bgp_raw.data.events[ idx ];
const ts = msg.timestamp;
if( msg.type == 'A' ) { // announcement: peer sees the prefix (again)
peer_state.add( msg.attrs.source_id );
var po = msg.attrs.target_prefix + " " + msg.attrs.path[ msg.attrs.path.length - 1 ];
if (! po_state.hasOwnProperty( po ) ) {
po_state[ po ] = new Set();
}
po_state[ po ].add( msg.attrs.source_id )
}
if( msg.type == 'W' ) { // now we need more work, since we don't know the original origin:
// a withdrawal carries no AS path, so remove the peer from every
// prefix/origin set it might have been in.
for ( var po in po_state ) {
po_state[ po ].delete( msg.attrs.source_id )
}
peer_state.delete( msg.attrs.source_id )
}
// Append a visibility point only when the count changed ...
if ( peer_state.size != bgp.visibility[ bgp.visibility.length - 1 ]['cnt'] ) {
if ( msg.timestamp != bgp.visibility[ bgp.visibility.length - 1]['ts'] ) { // ... and only change once per ts granularity (seconds) # need to keep as string for this to work
bgp.visibility.push({'ts': ts, 'cnt': peer_state.size});
}
}
// NOTE(review): the query requests unix_timestamps=TRUE, but parseTimeStat
// expects '%Y-%m-%dT%H:%M:%S', so this parse likely yields null — TODO confirm.
bgp.update_msg.push({'ts': parseTimeStat( msg['timestamp'] )
})
// update the per prefix/origin visibility timelines
for ( var po in po_state ) {
if (! bgp.po_vis.hasOwnProperty( po ) ) {
// first sighting of this prefix/origin after the initial state
bgp.po_vis[ po ] = [];
bgp.po_vis[ po ].push({'ts': ts, 'cnt': po_state[ po ].size });
} else {
var old_rec = bgp.po_vis[ po ][ bgp.po_vis[ po ].length - 1 ];
// NOTE(review): '&&' means a count change at the same ts as the last
// record is dropped entirely, not merely deduplicated — confirm intended.
if ( old_rec['ts'] != ts && old_rec['cnt'] != po_state[ po ].size ) {
bgp.po_vis[ po ].push({'ts': ts, 'cnt': po_state[ po ].size });
}
}
}

} // end processing each event
// put a stopper at the end so every timeline extends to the window edge
bgp.visibility.push({'ts': bgp_raw.data.query_endtime, 'cnt': peer_state.size});
for ( var po in bgp.po_vis ) {
const cnt = bgp.po_vis[ po ][ bgp.po_vis[po].length - 1 ]['cnt'];
bgp.po_vis[ po ].push({'ts': bgp_raw.data.query_endtime, 'cnt': cnt });
}
return bgp
}
Insert cell
{
// Wires a d3 zoom/pan behaviour onto the plot: a zoom gesture rescales the
// x axis and writes the new [start, end] window into the mutable `domain`
// cell, which dependent plot cells react to.
const zoom = d3.zoom().on("zoom", allZoom);

// fan-out handler so several plots could share one zoom gesture
function allZoom(e) {
handleZoom(e, plot);
//handleZoom(e, plot2);
}

function handleZoom(e, plt) {
// Rebuild a linear scale from the plot's current x scale, apply the zoom
// transform to it, and use its rescaled domain as the new visible window.
const scale = plt.scale("x");
const x = d3.scaleLinear().domain(scale.domain).range(scale.range);
const startDate = new Date(
e.transform.rescaleX(x).domain()[0]
// Math.max(e.transform.rescaleX(x).domain()[0], extent[0].getTime())
);

const endDate = new Date(
e.transform.rescaleX(x).domain()[1]
//Math.min(e.transform.rescaleX(x).domain()[1], extent[1])
);

// Observable `mutable` assignment: re-runs all cells that read `domain`
mutable domain = [startDate, endDate];
}

function initZoom() {
d3.select(plot).call(zoom);
//d3.select(plot2).call(zoom);
}

initZoom();
}
Insert cell
// currently visible x-axis time window; initialised to the full extent and
// updated by the zoom handler cell
mutable domain = extent;
Insert cell
// full [start, end] extent of the query window as Date objects
// NOTE(review): start_time/end_time are numbers; this relies on
// d3.utcParse("%s") coercing them to strings internally — confirm.
extent = d3.extent( [start_time, end_time].map( d => parseTime( d ) ) ) // , d => parseTime( d[0] ) )
Insert cell
// parses unix epoch seconds ("%s") into a Date
parseTime = d3.utcParse("%s");
Insert cell
// parses RIPEstat ISO-style timestamps (e.g. "2024-07-25T00:00:00") into a Date
parseTimeStat = d3.utcParse('%Y-%m-%dT%H:%M:%S')
Insert cell
// Fetches the RIS full-table visibility thresholds (per address family) for
// the query time. fetch() only rejects on network failure, so check the HTTP
// status explicitly before parsing the body.
thres_raw = fetch(`https://stat.ripe.net/data/ris-full-table-threshold/data.json?query_time=${ start_time }`).then( (r) => {
  if (!r.ok) {
    throw new Error(`ris-full-table-threshold request failed: HTTP ${r.status}`);
  }
  return r.json();
})
Insert cell
threshold = {
if (pfx_search.indexOf(':') > -1) {
return thres_raw['data']['v6']
} else {
return thres_raw['data']['v4']
}
}
Insert cell

One platform to build and deploy the best data apps

Experiment and prototype by building visualizations in live JavaScript notebooks. Collaborate with your team and decide which concepts to build out.
Use Observable Framework to build data apps locally. Use data loaders to build in any language or library, including Python, SQL, and R.
Seamlessly deploy to Observable. Test before you ship, use automatic deploy-on-commit, and ensure your projects are always up-to-date.
Learn more