Fixed sockhub missing a global and added some tor research data
parent c66dc92033
commit 7cbfc63f2a
onionglass.py | 14 (new file)
@@ -0,0 +1,14 @@
+import urllib.request
+import json
+
+# https://metrics.torproject.org/onionoo.html#details
+
+response = urllib.request.urlopen('https://onionoo.torproject.org/details')
+data = json.loads(response.read())
+
+
+for item in data['relays']:
+    print(item)
+
+for item in data['bridges']:
+    print(item)
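As written, onionglass.py just dumps every Onionoo details document verbatim. For reference, a minimal sketch of how the same response could be summarized instead — a hypothetical extension, assuming the documented `running`, `flags`, and `contact` fields of the details format:

```
import json
import urllib.request

# https://metrics.torproject.org/onionoo.html#details
response = urllib.request.urlopen('https://onionoo.torproject.org/details')
data = json.loads(response.read())

# Tally running relays, exit-flagged relays, and relays publishing contact info.
total = exits = with_contact = 0
for relay in data['relays']:
    if not relay.get('running'):
        continue
    total += 1
    if 'Exit' in relay.get('flags', []):
        exits += 1
    if relay.get('contact'):
        with_contact += 1

print(f'{total} running relays, {exits} exits, {with_contact} with contact info')
```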
@@ -21,6 +21,9 @@ def find_proxies(url: str) -> str:
 
     :param url: The URL to check for proxies.
     '''
 
+    global proxies
+
     try:
         source = urllib.request.urlopen(urllib.request.Request(url, headers={'User-Agent': 'SockHub/1.0'})).read().decode()
         if source:
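That `global proxies` line is the fix named in the commit message: without it, assigning to `proxies` inside `find_proxies` binds a new local name and the module-level value is never updated (and reading it before the assignment raises `UnboundLocalError`). A minimal sketch of the pitfall, separate from SockHub's actual code:

```
proxies = set()

def add_without_global():
    # Assignment creates a *local* 'proxies'; the module-level set is untouched.
    proxies = {'127.0.0.1:9050'}

def add_with_global():
    global proxies  # rebinding now targets the module-level name
    proxies = {'127.0.0.1:9050'}

add_without_global()
print(proxies)  # set() -- unchanged
add_with_global()
print(proxies)  # {'127.0.0.1:9050'}
```

Note that `global` is only needed because the function rebinds the name; mutating in place (e.g. `proxies.add(...)`) would work without it.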
torbad/relay_contact_frequency.txt | 2659 (new file; diff suppressed because it is too large)
torbad/tor.exit.json | 1 (new file; diff suppressed because one or more lines are too long)
torbad/tor.json | 1 (new file; diff suppressed because one or more lines are too long)
torbad/tor.md | 28 (new file)
@@ -0,0 +1,28 @@
+# Tor is NOT what you think it is
+
+
+# Hardcoded "Directory Authorities" control all voting on the entire Tor network:
+- https://gitlab.torproject.org/tpo/core/tor/-/blob/main/src/app/config/auth_dirs.inc
+
+# How many unique people are running relays
+
+First, let's analyze how many relays are on the network currently:
+```
+cat tor.json | jq -c .[] | wc -l
+5828
+```
+
+Next, how many of these relays provide contact information:
+```
+cat tor.json | jq -c .[].contact | grep -v ^null | wc -l
+4459
+```
+
+Let's now analyze the frequency of duplicate contact information:
+```
+cat tor.json | jq -rc .[].contact | grep -v ^null | sort | uniq -c | sort -nr > relay_contact_frequency.txt
+```
+
+You can view these stats [here](./relay_contact_frequency.txt). Based on these results, a few interesting observations stand out:
+- 435 relay operators are running more than 1 relay
+- Almost 50 relay operators are running 10 or more relays
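The same frequency analysis can be reproduced without jq. A short standard-library sketch (assuming tor.json is the relay list written by torglass.py, where each entry carries an optional `contact` field):

```
import json
from collections import Counter

# Mirror of: jq -rc .[].contact | grep -v ^null | sort | uniq -c | sort -nr
with open('tor.json') as fd:
    relays = json.load(fd)

counts = Counter(r['contact'] for r in relays if r.get('contact'))

multi = sum(1 for n in counts.values() if n > 1)
ten_plus = sum(1 for n in counts.values() if n >= 10)
print(f'{multi} contact strings appear on more than one relay')
print(f'{ten_plus} contact strings appear on 10 or more relays')

# Top of the frequency table, like relay_contact_frequency.txt
for contact, n in counts.most_common(20):
    print(f'{n:5d}  {contact}')
```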
torglass.py | 31
@@ -7,18 +7,30 @@ A simple script to pull a list of all the Tor relays / exit nodes & generate a j
 The example below will generate a map of all the Tor relays / exit nodes using the ipinfo.io API.
 '''
 
+import datetime
+
 try:
     import stem.descriptor.remote
 except ImportError:
     raise SystemExit('missing required library \'stem\' (https://pypi.org/project/stem/)')
 
 
-def get_descriptors() -> dict:
-    ''' Generate a json database of all Tor relays & exit nodes '''
-    tor_map = {'relay':list(),'exit':list()}
-    for relay in stem.descriptor.remote.get_server_descriptors():
+def get_descriptors(start_time = None) -> dict:
+    '''
+    Generate a json database of all Tor relays & exit nodes.
+
+    :param start_time: (optional) datetime object to start from
+    '''
+
+    tor_map = { 'relay': [], 'exit': [] }
+
+    source = stem.descriptor.collector.get_server_descriptors(start = start_time) if start_time else stem.descriptor.remote.get_server_descriptors()
+
+    for relay in source:
         data = {
             'nickname' : relay.nickname,
             'fingerprint' : relay.fingerprint,
+            'or_addresses' : relay.or_addresses,
             'published' : str(relay.published) if relay.published else None,
             'address' : relay.address,
             'or_port' : relay.or_port,
@@ -28,7 +40,7 @@ def get_descriptors() -> dict:
             'tor_version' : str(relay.tor_version),
             'operating_system' : relay.operating_system,
             'uptime' : relay.uptime,
-            'contact' : str(relay.contact) if relay.contact else None,
+            'contact' : relay.contact.decode('utf-8') if relay.contact else None,
             'exit_policy' : str(relay.exit_policy) if relay.exit_policy else None,
             'exit_policy_v6' : str(relay.exit_policy_v6) if relay.exit_policy_v6 else None,
             'bridge_distribution' : relay.bridge_distribution,
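The `contact` change is worth a note: stem returns that field as bytes, and calling `str()` on bytes yields its repr, so the old code baked strings like "b'admin@example.com'" into tor.json. A two-line illustration (the address is made up):

```
contact = b'admin@example.com'  # stem hands the contact line back as bytes
print(str(contact))             # b'admin@example.com' -- the repr, not the text
print(contact.decode('utf-8'))  # admin@example.com
```

One caveat: `.decode('utf-8')` assumes the descriptor's contact line is valid UTF-8 and raises `UnicodeDecodeError` otherwise; `decode('utf-8', 'replace')` would be the defensive variant.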
@@ -47,21 +59,28 @@
             'extra_info_sha256_digest' : relay.extra_info_sha256_digest,
             'eventdns' : relay.eventdns,
             'ntor_onion_key' : relay.ntor_onion_key,
-            'or_addresses' : relay.or_addresses,
             'protocols' : relay.protocols
         }
 
         if relay.exit_policy.is_exiting_allowed():
             tor_map['exit'].append(data)
         else:
             tor_map['relay'].append(data)
 
     return tor_map
 
 
 if __name__ == '__main__':
     import json
 
     print('loading Tor descriptors... (this could take a while)')
 
+    now = datetime.datetime.now(datetime.timezone.utc)
+    way_back = now.replace(year=now.year-1)
+
     tor_data = get_descriptors()
+    #tor_data = get_descriptors(way_back)
 
     with open('tor.json', 'w') as fd:
         json.dump(tor_data['relay'], fd)
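A caveat on the new start_time path: `get_descriptors()` now references `stem.descriptor.collector`, but the script only imports `stem.descriptor.remote`, which does not necessarily make the collector submodule available. Importing it anywhere before the call avoids a potential `AttributeError`, since Python attaches an imported submodule to the parent package globally. A hypothetical driver illustrating this:

```
# Assumes torglass.py is importable as 'torglass' and stem is installed.
import datetime
import json

import stem.descriptor.collector  # required before using the start_time branch
import stem.descriptor.remote

from torglass import get_descriptors

now = datetime.datetime.now(datetime.timezone.utc)
way_back = now.replace(year=now.year - 1)

# Historical descriptors via CollecTor -- downloads a year of archives, so slow.
tor_data = get_descriptors(way_back)

with open('tor.exit.json', 'w') as fd:
    json.dump(tor_data['exit'], fd)
```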