mirror of git://git.acid.vegas/tools.git
Compare commits
2 Commits
640ca6f2ca
...
c757f35cef
Author | SHA1 | Date |
---|---|---|
Dionysus | c757f35cef | |
Dionysus | 5a13675282 |
|
@ -0,0 +1,115 @@
|
|||
#!/usr/bin/env python
|
||||
# hateserv irc bot - developed by acidvegas in python (https://git.acid.vegas/hateserv)
|
||||
|
||||
import http.client
|
||||
import json
|
||||
import re
|
||||
|
||||
def between(source, start, stop):
    '''Return the first substring of *source* found between *start* and *stop*, or False when no match exists.'''
    pattern = re.compile(start + '(.*?)' + stop, re.IGNORECASE | re.MULTILINE)
    match = pattern.search(source)
    if match:
        return match.group(1)
    return False
|
||||
|
||||
def geoip(ip: str):
    '''
    Look up geolocation, ASN, and RIR information for an IP address via api.ipapi.is.

    :param ip: The IP address to look up
    :return: dict with 'location', 'asn', and 'rir' summary strings
    '''
    # BUG FIX: the endpoint must begin with '/' — http.client sends it verbatim as
    # the HTTP request target, and a bare '?q=...' is not a valid request path.
    api = geturl('api.ipapi.is', '/?q=' + ip)
    data = json.loads(api)
    location = '{0}, {1}, {2}'.format(data['location']['city'], data['location']['state'], data['location']['country_code'])
    asn = 'AS{0} {1}'.format(data['asn']['asn'], data['asn']['descr'])
    return {'location': location, 'asn': asn, 'rir': data['rir']}
|
||||
|
||||
def geturl(url, endpoint, headers=None):
    '''
    Perform an HTTPS GET request and return the raw response body as bytes.

    :param url: The host to connect to
    :param endpoint: The request path (must begin with '/')
    :param headers: Optional dict of request headers
    '''
    # FIX: the old signature used a mutable default ({}) which is shared across
    # calls in Python; None is the safe sentinel.
    conn = http.client.HTTPSConnection(url, timeout=15)
    try:
        conn.request('GET', endpoint, headers=headers or {})
        return conn.getresponse().read()
    finally:
        conn.close()  # always release the socket, even when the request raises
|
||||
|
||||
def google(query):
    '''
    Search Google Custom Search for *query* and return up to 10 result items.

    NOTE(review): `build`, `google_api_key`, and `google_cse_id` are not defined or
    imported in this file — `build` presumably comes from googleapiclient.discovery;
    confirm the import and credentials exist before calling this, otherwise this
    raises NameError at runtime.

    :param query: The search query string
    :return: list of result items, or False when the API returns nothing
    '''
    service = build('customsearch', 'v1', developerKey=google_api_key, cache_discovery=False).cse()
    results = service.list(q=query, cx=google_cse_id, num=10).execute()
    return results['items'] if results else False
|
||||
|
||||
def github(option, query):
    '''
    Query the GitHub REST API.

    :param option: 'search' (repository search), 'repo' (single repository), or 'user'
    :param query: search terms, 'owner/repo', or a username, depending on *option*
    :return: parsed JSON (list/dict), False for an empty search, or None for an unknown option
    '''
    # Hoisted: the same header dict was duplicated verbatim in all three branches.
    headers = {'Accept': 'application/vnd.github.v3+json', 'User-Agent': 'HateServ/1.0'}
    if option == 'search':
        data = json.loads(geturl('api.github.com', '/search/repositories?q=' + query, headers=headers))
        return data['items'] if data['items'] else False
    elif option == 'repo':
        return json.loads(geturl('api.github.com', '/repos/' + query, headers=headers))
    elif option == 'user':
        return json.loads(geturl('api.github.com', '/users/' + query, headers=headers))
|
||||
|
||||
def imdb(query):
    '''
    Look up a movie or series on OMDb (https://www.omdbapi.com) by title or IMDb id.

    NOTE(review): `api_key` is not defined anywhere in this file — this raises
    NameError at runtime; confirm where the OMDb key is supposed to come from.

    :param query: a title (optionally ending in a 4-digit year) or an IMDb id ('tt...')
    :return: parsed OMDb response dict, or False when OMDb reports no match
    '''
    year = query.split()[-1]                    # possible trailing year, e.g. 'blade runner 1982'
    query = query.replace(' ', '%20')           # crude URL-encoding of spaces only
    search = 'i' if query.startswith('tt') else 't'   # 'i' = lookup by IMDb id, 't' = by title
    if search == 't' and len(year) == 4 and year.isdigit():
        # query[:-5] strips the '%20YYYY' suffix so the year goes in the 'y' parameter instead
        endpoint = f'/?{search}={query[:-5]}&y={year}&apikey={api_key}'
    else:
        endpoint = f'/?{search}={query}&apikey={api_key}'
    conn = http.client.HTTPSConnection('omdbapi.com', timeout=15)
    conn.request('GET', endpoint, headers={'Accept':'application/json'})
    response = json.loads(conn.getresponse().read())
    conn.close()
    # OMDb signals success with the string 'True' in the 'Response' field
    return response if response['Response'] == 'True' else False
|
||||
|
||||
def reddit(option, subreddit, id=None):
    '''
    Fetch a single reddit post ('post') or the latest posts of a subreddit ('subreddit').

    :param option: 'post' or 'subreddit'
    :param subreddit: the subreddit name
    :param id: the post id (only used with option='post')
    '''
    headers = {'Accept': 'application/json', 'User-Agent': 'HateServ/1.0'}
    if option == 'post':
        data = json.loads(geturl('reddit.com', f'/r/{subreddit}/comments/{id}.json', headers=headers))
        if 'error' not in data:
            return data[0]['data']['children'][0]['data']
        return False
    elif option == 'subreddit':
        data = json.loads(geturl('reddit.com', f'/r/{subreddit}.json?limit=20', headers=headers))
        posts = []
        for item in data['data']['children']:
            if not item['data']['stickied']:
                posts.append(item['data'])
        return posts if posts else None
|
||||
|
||||
def youtube(option, query, api_key):
    '''
    Look up a YouTube video ('video') or run a video search ('search').

    NOTE(review): the 'video' branch is broken as written — `httplib`, `config`, and
    `build` are not defined in this file, and `id` refers to the builtin function
    rather than a video id (the `query` argument is never used in that branch).
    Confirm the intended helpers/imports before relying on it.

    :param option: 'video' or 'search'
    :param query: the search query (search branch only)
    :param api_key: YouTube Data API key (search branch only)
    '''
    if option == 'video':
        api = httplib.get_json(f'https://www.googleapis.com/youtube/v3/videos?key={config.api.google_api_key}&part=snippet,statistics&id={id}')
        if api['items']:
            api = api['items'][0]
            data = {}
            data['channel'] = api['snippet']['channelTitle']
            data['description'] = ' '.join(api['snippet']['description'].split())  # collapse whitespace/newlines
            data['dislikes'] = api['statistics']['dislikeCount']
            data['likes'] = api['statistics']['likeCount']
            data['title'] = api['snippet']['title']
            data['views'] = api['statistics']['viewCount']
            return data
        else:
            return False
    elif option == 'search':
        # NOTE(review): `build` (googleapiclient.discovery) is not imported here either.
        service = build('youtube', 'v3', developerKey=api_key).search()
        results = service.list(part='id', type='video', q=query, maxResults=10).execute()
        return results['items'] if results else False
|
||||
|
||||
def twitter(data):
    '''Post *data* as a tweet. Currently a no-op: the tweepy implementation is commented out.'''
    # auth = tweepy.OAuthHandler(twitter_consumer_key, twitter_consumer_secret)
    # auth.set_access_token(twitter_access_token, twitter_access_secret)
    # api = tweepy.API(auth)
    # api.update_status(data)
    pass
|
||||
|
||||
def unreal():
    '''Placeholder for UnrealIRCd integration — not implemented yet.'''
    pass
|
||||
|
||||
def anope():
    '''Placeholder for Anope services integration — not implemented yet.'''
    pass
|
||||
|
||||
|
||||
'''
|
||||
elif args[0] == '.imdb' and len(args) >= 2:
|
||||
query = ' '.join(args[1:])
|
||||
api = imdb.search(query, config.api.omdbapi_key)
|
||||
if api:
|
||||
Commands.sendmsg(chan, '{0} {1} {2} {3}'.format(color('Title :', white), api['Title'], api['Year'], color(api['Rated'], grey)))
|
||||
Commands.sendmsg(chan, '{0} {1}{2}'.format(color('Link :', white), underline, color('https://imdb.com/title/' + api['imdbID'], light_blue)))
|
||||
Commands.sendmsg(chan, '{0} {1}'.format(color('Genre :', white), api['Genre']))
|
||||
if api['imdbRating'] == 'N/A':
|
||||
Commands.sendmsg(chan, '{0} {1} N/A'.format(color('Rating :', white), color('★★★★★★★★★★', grey)))
|
||||
else:
|
||||
Commands.sendmsg(chan, '{0} {1}{2} {3}'.format(color('Rating :', white), color('★'*round(float(api['imdbRating'])), yellow), color('★'*(10-round(float(api['imdbRating']))), grey), a
|
||||
pi['imdbRating']))
|
||||
Commands.sendmsg(chan, '{0} {1}'.format(color('Plot :', white), api['Plot']))
|
||||
else:
|
||||
Commands.error(chan, 'no results found')
|
||||
'''
|
|
@ -0,0 +1,143 @@
|
|||
import csv
|
||||
import io
|
||||
import json
|
||||
import urllib.request
|
||||
import sys
|
||||
import time
|
||||
|
||||
def download_file(url: str, dest_filename: str, chunk_size: int = 1024*1024):
    '''
    Download a file from a given URL in chunks and save to a destination filename,
    printing progress and per-chunk speed to stdout.

    :param url: The URL of the file to download
    :param dest_filename: The destination filename to save the downloaded file
    :param chunk_size: Size of chunks to download. Default is set to 1MB.
    '''
    with urllib.request.urlopen(url) as response:
        # FIX: Content-Length is optional (chunked responses omit it) — the old
        # code called .strip() on None and crashed.
        length_header = response.headers.get('Content-Length')
        total_size = int(length_header.strip()) if length_header else 0
        downloaded_size = 0
        with open(dest_filename, 'wb') as out_file:
            while True:
                start_time = time.time()
                chunk = response.read(chunk_size)
                if not chunk:
                    break
                downloaded_size += len(chunk)
                out_file.write(chunk)
                elapsed = time.time() - start_time
                # FIX: a chunk served within one clock tick made elapsed == 0 and
                # raised ZeroDivisionError; guard both divisions.
                speed = len(chunk) / elapsed if elapsed > 0 else 0.0
                progress = (downloaded_size / total_size) * 100 if total_size else 0.0
                sys.stdout.write(f'\rDownloaded {downloaded_size} of {total_size} bytes ({progress:.2f}%) at {speed/1024:.2f} KB/s\r')
                sys.stdout.flush()
    print()
|
||||
|
||||
def get_url(url: str, sent_headers: dict = None, reader: bool = True):
    '''
    Retrieve a URL with custom headers.

    :param url: The URL to retrieve
    :param sent_headers: The headers to send (default: none)
    :param reader: If True, return the decoded response body; otherwise return the response object
    '''
    # FIX: the old default was a shared mutable dict ({}); None is the safe sentinel.
    # (Docstring also named the parameter 'data' — corrected to 'sent_headers'.)
    req = urllib.request.Request(url, headers=sent_headers if sent_headers is not None else {})
    if reader:
        return urllib.request.urlopen(req, timeout=10).read().decode()
    else:
        return urllib.request.urlopen(req, timeout=10)
|
||||
|
||||
def setup_user_agent(user_agent: str = 'Mozilla/5.0 (Windows NT 10.0; Win64; x64)'):
    '''
    Install a global urllib.request opener whose requests carry *user_agent*.

    :param user_agent: The user agent to use
    '''
    opener = urllib.request.build_opener(urllib.request.HTTPHandler())
    opener.addheaders = [('User-agent', user_agent)]
    urllib.request.install_opener(opener)
|
||||
|
||||
|
||||
# -------------------------------------------------------------------------------- #
|
||||
|
||||
def asn_seach(query: str):
    '''
    Search for an ASN by string via the BGPView API.

    NOTE(review): the function name has a typo ('seach') — kept as-is because
    callers may already reference it. The query is not URL-encoded either;
    confirm callers never pass spaces or reserved characters.

    :param query: The string to search
    '''
    return json.loads(get_url('https://api.bgpview.io/search?query_term='+query))
|
||||
|
||||
def cve_search(query: str, limit: str = '25'):
    '''
    Search the NVD CVE database for *query*.

    :param query: The string to search
    :param limit: The number of results to return
    '''
    endpoint = f'https://services.nvd.nist.gov/rest/json/cves/2.0?keywordSearch={query}&resultsPerPage={limit}'
    return json.loads(get_url(endpoint))
|
||||
|
||||
def geoip(ip: str):
    '''
    Get the geolocation of an IP address via api.ipapi.is.

    :param ip: The IP address to geolocate
    '''
    raw = get_url('https://api.ipapi.is/?q=' + ip)
    return json.loads(raw)
|
||||
|
||||
def github(option: str, query: str):
    '''
    Search for a GitHub repository or user.

    :param option: The option to search for (search, repo, user)
    :param query: The query to search
    :return: parsed JSON, False for an empty search, or None for an unknown option
    '''
    header = {'Accept': 'application/vnd.github.v3+json'}
    base = 'https://api.github.com'
    if option == 'search':
        data = json.loads(get_url(base + '/search/repositories?q=' + query, header))
        return data['items'] if data['items'] else False
    elif option == 'repo':
        return json.loads(get_url(base + '/repos/' + query, header))
    elif option == 'user':
        return json.loads(get_url(base + '/users/' + query, header))
|
||||
|
||||
def librex(query: str):
    '''
    Search on the SuperNETs instance running LibreX.

    :param query: The query to search
    '''
    endpoint = f'https://librex.supernets.org/api.php?q={query}&t=0'
    return json.loads(get_url(endpoint))
|
||||
|
||||
def reddit(option, subreddit, id=None):
    '''
    Search for a Reddit post or subreddit.

    :param option: The option to search for (post, subreddit)
    :param subreddit: The subreddit to search
    :param id: The post ID to search
    '''
    header = {'Accept':'application/json'}
    # BUG FIX: get_url() takes one full URL — the old calls passed host and path as
    # two positional arguments, so the path landed in get_url's `sent_headers`
    # parameter and the request was malformed.
    if option == 'post':
        data = json.loads(get_url(f'https://reddit.com/r/{subreddit}/comments/{id}.json', header))
        return data[0]['data']['children'][0]['data'] if 'error' not in data else False
    elif option == 'subreddit':
        data = json.loads(get_url(f'https://reddit.com/r/{subreddit}.json?limit=20', header))
        posts = [item['data'] for item in data['data']['children'] if not item['data']['stickied']]
        return posts if posts else None
|
||||
|
||||
def exploitdb(query: str):
    '''
    Search for an exploit or shellcode on ExploitDB.

    :param query: The query to search
    '''
    sources = (
        'https://git.supernets.org/mirrors/exploitdb/raw/branch/main/files_exploits.csv',
        'https://git.supernets.org/mirrors/exploitdb/raw/branch/main/files_shellcodes.csv',
    )
    needle = query.lower()
    matches = []
    for csv_url in sources:
        reader = csv.DictReader(io.StringIO(get_url(csv_url)))
        for row in reader:
            if needle in row['description'].lower():
                matches.append(row)
    return matches
|
|
@ -1,21 +1,19 @@
|
|||
#!/usr/bin/env python
|
||||
# hateserv irc bot - developed by acidvegas in python (https://git.acid.vegas/hateserv)
|
||||
|
||||
import json
|
||||
import random
|
||||
import re
|
||||
import socket
|
||||
import ssl
|
||||
import time
|
||||
import urllib.request
|
||||
|
||||
import api
|
||||
import commands
|
||||
|
||||
# Config
|
||||
admin = 'acidvegas!~stillfree@most.dangerous.motherfuck'
|
||||
server = 'irc.supernets.org'
|
||||
channel = '#dev'
|
||||
nickname = 'HateServ'
|
||||
nickname = '[dev]HateServ'
|
||||
username = 'H'
|
||||
realname = 'SuperNETs HATE Services'
|
||||
nickserv_password = 'simps0nsfan22'
|
||||
|
@ -70,18 +68,18 @@ def urlcheck(msg):
|
|||
url = url[0]
|
||||
try:
|
||||
if (check := re.match('^.*?github.com\/([0-9A-Za-z]+\/[0-9A-Za-z]+).*?', url, re.IGNORECASE)):
|
||||
data = api.github('repo', check.group(1))
|
||||
data = commands.github('repo', check.group(1))
|
||||
if data:
|
||||
if not data['description']:
|
||||
data['description'] = 'no description available'
|
||||
sendmsg(channel, '{0} {1} {2} [{3}:{4}|{5}:{6}|{7}:{8}]'.format(color(' GitHub ', black, grey), data['full_name'], color('('+data['description']+')', grey), color('Stars', purple), data['stargazers_count'], color('Watch', purple), data['watchers'], color('Forks', purple), data['forks']))
|
||||
elif (check := re.match('^.*?github.com\/([0-9A-Za-z]+)', url, re.IGNORECASE)):
|
||||
data = api.github('user', check.group(1))
|
||||
data = commands.github('user', check.group(1))
|
||||
if data:
|
||||
data['bio'] = data['bio'].replace('\r\n','') if data['bio'] else ''
|
||||
sendmsg(channel, '{0} {1} {2} {3} [{4}:{5}|{6}:{7}]'.format(color(' GitHub ', black, grey), data['login'], color('('+data['name']+')', grey), data['bio'], color('Repos', purple), data['public_repos'], color('Followers', purple), data['followers']))
|
||||
elif (check := re.match('^.*?reddit.com\/r\/(.*?)\/comments\/([0-9A-Za-z]+).*$', url, re.IGNORECASE)):
|
||||
data = api.reddit('post', check.group(1), check.group(2))
|
||||
data = commands.reddit('post', check.group(1), check.group(2))
|
||||
sendmsg(channel, '[{0}] - {1} [{2}/{3}|{4}]'.format(color('reddit', cyan), color(trim(data['title'], 300), white), color('+' + str(data['ups']), green), color('-' + str(data['downs']), red), color(str(data['num_comments']), white)))
|
||||
elif (check := re.match('^.*?youtu(be)?\.([a-z])+\/(watch(.*?)(\?|\&)v=)?(.*?)(&(.)*)*$', url, re.IGNORECASE)):
|
||||
pass
|
||||
|
@ -109,67 +107,50 @@ def event_message(chan, nick, ident, msg):
|
|||
if ident == admin:
|
||||
if msg == '.massjoin':
|
||||
raw('WHO * n%nc')
|
||||
elif args[0] == '.t':
|
||||
content = msg[3:]
|
||||
if len(content) < 240:
|
||||
api.twitter('tweet', content)
|
||||
sendmsg(chan, '\x01ACTION whispers "{content}" into Elon Musks ear...\x01')
|
||||
else:
|
||||
irc_error(chan, 'tweet too long (max: 240)')
|
||||
if msg == '.talent':
|
||||
if random.randint(1,5000) == 1337:
|
||||
for i in range(100):
|
||||
sendmsg(chan, nick + ': ' + color(' RIP-DIDDLE-DIP-DIP-DIP-DIP IT\'S YOUR BIRTHDAY !!! ', random.randint(2,13), random.randint(2,13)))
|
||||
else:
|
||||
sendmsg(chan, color('(^)', random.randint(2,13)))
|
||||
elif args[0] == '.g':
|
||||
if args[0] in ('.g','.s'):
|
||||
query = ' '.join(args[1:])
|
||||
results = api.google(query)
|
||||
results = commands.librex(query)
|
||||
if results:
|
||||
for item in results:
|
||||
sendmsg(chan, '[{0}] {1}'.format(color(str(results.index(item)+1).zfill(2), pink), trim(item['title'], 300)))
|
||||
sendmsg(chan, ' '*5 + underline + color(item['link'], light_blue))
|
||||
else:
|
||||
irc_error(chan, 'no results found')
|
||||
elif args[0] == '.cve':
|
||||
data = commands.cve_search(' '.join(args[1:]))
|
||||
for item in data['vulnerabilities']:
|
||||
id = item['cve']['id']
|
||||
desc = item['cve']['descriptions'][0]['value']
|
||||
sendmsg(chan, '[{0}] {1} - {2}'.format(color(str(data['vulnerabilities'].index(item)+1).zfill(2), pink), color(id, cyan), trim(desc, 300)))
|
||||
elif args[0] == '.ip':
|
||||
data = commands.geoip(args[1])
|
||||
location = '{0}, {1}, {2}'.format(data['location']['city'], data['location']['state'], data['location']['country_code'])
|
||||
asn = 'AS{0} ({1})'.format(data['asn']['asn'], data['asn']['descr'])
|
||||
sendmsg(chan, '[{0}] {1} under {2} controlled by {3}'.format(color('geoip', light_blue), color(location, yellow), color(asn, cyan), color(data['rir'], pink)))
|
||||
elif args[0] == '.gh':
|
||||
query = ' '.join(args[1:]).replace(' ','%20')
|
||||
results = api.github('search',query)
|
||||
results = commands.github('search',query)
|
||||
if results:
|
||||
for item in results:
|
||||
if not item['description']:
|
||||
item['description'] = 'no description'
|
||||
sendmsg(chan, '[{0}] {1}/{2}{3}{4} {5}'.format(color(str(results.index(item)+1).zfill(2), pink), item['owner']['login'], bold, item['name'], reset, color('('+item['description']+')', grey)))
|
||||
sendmsg(chan, ' '*5 + underline + color(item['html_url'], light_blue))
|
||||
elif args[0] == '.netsplit' and len(args) >= 2:
|
||||
query = ' '.join(args[1:])
|
||||
results = api.netsplit('channels', query)
|
||||
if results:
|
||||
for item in results:
|
||||
sendmsg(chan, '[{0}] {1} on {2} {3}'.format(color(str(results.index(item)+1).zfill(2), pink), color(item['channel'], purple), color(item['network'], yellow), color('('+item['users']+')', grey)))
|
||||
sendmsg(chan, ' '*5 + color(trim(item['topic'], 300), light_blue))
|
||||
else:
|
||||
irc_error(chan, 'no results found')
|
||||
elif args[0] == '.r' and len(args) == 2:
|
||||
query = args[1]
|
||||
results = api.reddit('subreddit', query)
|
||||
results = commands.reddit('subreddit', query)
|
||||
if results:
|
||||
for item in results:
|
||||
sendmsg(chan, '[{0}] {1} [{2}/{3}|{4}]'.format(color(str(results.index(item)+1).zfill(2), pink), trim(item['title'], 300), color('+' + str(item['ups']), green), color('-' + str(item['downs']), red), color(item['num_comments'], white)))
|
||||
sendmsg(chan, ' '*5 + underline + color(item['url'], light_blue))
|
||||
else:
|
||||
irc_error(chan, 'no results found')
|
||||
elif args[0] == '.yt':
|
||||
query = ' '.join(args[1:])
|
||||
results = api.youtube('search', query)
|
||||
if results:
|
||||
for result in results:
|
||||
sendmsg(chan, '[{0}] {1}'.format(color(str(results.index(item)+1).zfill(2), pink), trim(item['snippet']['title'], 300)))
|
||||
sendmsg(chan, ' '*5 + underline + color('https://www.youtube.com/watch?v='+item['id']['videoId'], light_blue))
|
||||
|
||||
while True:
|
||||
try:
|
||||
sock = ssl.wrap_socket(socket.socket())
|
||||
sock.connect((server, 6697))
|
||||
#try:
|
||||
#sock = ssl.wrap_socket(socket.socket())
|
||||
sock = socket.socket()
|
||||
sock.connect((server, 6667))
|
||||
raw(f'USER {username} 0 * :{realname}')
|
||||
raw('NICK ' + nickname)
|
||||
while True:
|
||||
|
@ -201,16 +182,16 @@ while True:
|
|||
nick = args[0].split('!')[0][1:].lower()
|
||||
msg = ' '.join(args[3:])[1:]
|
||||
if chan == channel:
|
||||
try:
|
||||
#try:
|
||||
event_message(chan, nick, ident, msg)
|
||||
except Exception as ex:
|
||||
irc_error(chan, 'unknown error occured', ex)
|
||||
#except Exception as ex:
|
||||
# irc_error(chan, 'unknown error occured', ex)
|
||||
elif chan == nickname and ident == admin and msg.startswith('.raw '):
|
||||
raw(msg[5:])
|
||||
except (UnicodeDecodeError, UnicodeEncodeError):
|
||||
pass
|
||||
except Exception as ex:
|
||||
error('fatal error occured', ex)
|
||||
sock.close()
|
||||
finally:
|
||||
time.sleep(15)
|
||||
#except Exception as ex:
|
||||
# error('fatal error occured', ex)
|
||||
# sock.close()
|
||||
#finally:
|
||||
# time.sleep(15)
|
|
@ -1,40 +0,0 @@
|
|||
#!/bin/sh
# SuperNETs tool for Anope deployment - Developed by acidvegas (https://git.acid.vegas/supertools)
ANOPE=$HOME/services
SOURCE=$HOME/services.source
# BUG FIX: the dependency checks were missing the '!' — they aborted when the
# package WAS installed instead of when it was missing.
[ ! "$(command -v curl)" ] && echo "error: missing required package 'curl'" && exit 1
[ ! "$(command -v git)" ] && echo "error: missing required package 'git'" && exit 1
[ ! "$(command -v jq)" ] && echo "error: missing required package 'jq'" && exit 1
[ ! "$(command -v make)" ] && echo "error: missing required package 'make'" && exit 1
if [ "$#" = '1' ]; then
	if [ "$1" = 'check' ]; then
		# Compare the locally installed version with the latest GitHub release tag.
		CURRENT=$($ANOPE/bin/services -v | cut -d' ' -f1 | cut -d'-' -f2)
		LATEST=$(curl -s https://api.github.com/repos/anope/anope/releases/latest | jq '.tag_name')
		[ ! "$CURRENT" = "$LATEST" ] && echo "new version available: $LATEST"
	elif [ "$1" = 'deploy' ]; then
		git clone --depth 1 https://github.com/supernets/anope.git $SOURCE
		cd $SOURCE && $SOURCE/Config -nointro -quick && cd $SOURCE/build && make && make install && cd $HOME && rm -rf $SOURCE
		if [ "$(command -v crontab)" ]; then
			crontab -l | { cat; echo "*/5 * * * * $HOME/services/data/services.chk >/dev/null 2>&1"; } | crontab -
			crontab -l | { cat; echo "@reboot $HOME/services/bin/services"; } | crontab -
		elif [ "$(command -v systemctl)" ]; then
			echo -e "[Unit]\nDescription=Anope Check Timer\n\n[Timer]\nOnBootSec=1min\nOnUnitActiveSec=5min\n\n[Install]\nWantedBy=timers.target" > $HOME/.config/systemd/user/anope.timer
			echo -e "[Unit]\nDescription=Anope Check Service\n\n[Service]\nType=oneshot\nExecStart=$HOME/services/data/services.chk >/dev/null 2>&1" > $HOME/.config/systemd/user/anope.service
		else
			echo "warning: cron/systemd not found on system! (reboot/restart timers not set)"
		fi
		# BUG FIX: the sed programs were single-quoted, so $HOST/$PORT/$PASSWORD/$SEED
		# were written literally into services.conf instead of their values.
		read -p "host = " HOST && sed -i "s/host = \"REDACTED\"/host = \"$HOST\"/g" $ANOPE/conf/services.conf
		read -p "port = " PORT && sed -i "s/port = REDACTED/port = $PORT/g" $ANOPE/conf/services.conf
		read -p "password = " PASSWORD && sed -i "s/password = \"REDACTED\"/password = \"$PASSWORD\"/g" $ANOPE/conf/services.conf
		read -p "seed = " SEED && sed -i "s/seed = REDACTED/seed = $SEED/g" $ANOPE/conf/services.conf
		$ANOPE/bin/services
	elif [ "$1" = 'update' ]; then
		BACKUP=$ANOPE.backup
		# BUG FIX: '$NAOPE' was a typo for '$ANOPE' (the cp source path was empty).
		mkdir $BACKUP && cp $ANOPE/conf/services.conf $BACKUP && cp $ANOPE/data/anope.db $BACKUP
		pkill -9 services && rm -rf $ANOPE
		git clone --depth 1 https://github.com/supernets/anope.git $SOURCE
		cd $SOURCE && $SOURCE/Config -nointro -quick && cd $SOURCE/build && make && make install && cd $HOME && rm -rf $SOURCE
		mv $BACKUP/services.conf $ANOPE/conf/ && mv $BACKUP/anope.db $ANOPE/data
		$ANOPE/bin/services
	fi
fi
|
|
@ -1,11 +0,0 @@
|
|||
#!/bin/sh
# SuperNETs tool for git deployment - Developed by acidvegas (https://git.acid.vegas/supertools)
# BUG FIX: 'grep -q' prints nothing, so '[ ! $(grep -q ...) ]' was always true and
# appended a duplicate line on every run; test grep's exit status instead.
grep -q /usr/bin/git-shell /etc/shells || echo "/usr/bin/git-shell" >> /etc/shells
# BUG FIX: the old test removed the 'git' user only when it did NOT exist;
# remove it when it DOES exist so the useradd below starts from a clean slate.
getent passwd git > /dev/null && userdel -f git
useradd -d /srv/git -k /dev/null -m -s /usr/bin/git-shell -U git
mkdir -p /srv/git/git-shell-commands
# BUG FIX: '$1' must be escaped so it expands when the generated helper script
# runs, not when this installer runs (where $1 is this script's own argument).
echo -e "#!/bin/sh\nmkdir \$1.git\ngit init --bare \$1" > /srv/git/git-shell-commands/add
echo -e "#!/bin/sh\nrm -rf \$1" > /srv/git/git-shell-commands/del
chmod +x /srv/git/git-shell-commands/* && chown -R git:git /srv/git/git-shell-commands
echo "Be sure to use 'AuthorizedKeysFile /etc/ssh/authorized_keys/%u' in your /etc/ssh/sshd_config"
echo "Add your public key to /etc/ssh/authorized_keys/git prefixed with 'no-port-forwarding,no-X11-forwarding,no-agent-forwarding,no-pty'"
|
|
@ -1,18 +1,33 @@
|
|||
#!/bin/sh
|
||||
# SuperNETs tool for UnrealIRCd deployment - Developed by acidvegas (https://git.acid.vegas/supertools)
|
||||
# debian deployment: apt-get install build-essential pkg-config gdb libssl-dev libpcre2-dev libargon2-0-dev libsodium-dev libc-ares-dev libcurl4-openssl-dev
|
||||
|
||||
UNREAL=$HOME/unrealircd
|
||||
SOURCE=$UNREAL.source
|
||||
[ ! $(command -v curl) ] && echo "error: missing required package 'curl'" && exit 1
|
||||
[ ! $(command -v git) ] && echo "error: missing required package 'git'" && exit 1
|
||||
[ ! $(command -v make) ] && echo "error: missing required package 'make'" && exit 1
|
||||
if [ "$#" = '1' ]; then
|
||||
if [ $1 = 'check' ]; then
|
||||
|
||||
for pkg in curl git jq make; do
|
||||
if ! command -v $pkg > /dev/null; then
|
||||
echo "error: missing required package '$pkg'"
|
||||
exit 1
|
||||
fi
|
||||
done
|
||||
|
||||
case "$1" in
|
||||
check)
|
||||
[ ! $(command -v jq) ] && echo "error: missing required package 'jq'" && exit 1
|
||||
CURRENT=$($UNREAL/unrealircd version | cut -d'-' -f2)
|
||||
LATEST=$(curl -s https://www.unrealircd.org/downloads/list.json | jq '[.[]][1].Stable.version')
|
||||
[ ! $CURRENT = $LATEST ] && echo "new version available: $LATEST"
|
||||
elif [ $1 = 'deploy' ]; then
|
||||
;;
|
||||
|
||||
distcert)
|
||||
for link in cowboy contra omega omni phish; do # Make this an arguement instead of hardcoded
|
||||
scp irc.* $link:unrealircd/conf/tls
|
||||
ssh $1 unrealircd/unrealircd rehash && unrealircd/unrealircd reloadtls
|
||||
done
|
||||
;;
|
||||
|
||||
deploy)
|
||||
git clone --depth 1 https://github.com/supernets/unrealircd.git $SOURCE
|
||||
cd $SOURCE && echo -e "\n" | ./Config -nointro && make && make install && cd $HOME && rm -rf $SOURCE
|
||||
rm $UNREAL/conf/*.conf
|
||||
|
@ -28,12 +43,15 @@ if [ "$#" = '1' ]; then
|
|||
elif [ $(command -v systemctl) ]; then
|
||||
echo -e "[Unit]\nDescription=UnrealIRCd Cron Check Timer\n\n[Timer]\nOnBootSec=1min\nOnUnitActiveSec=5min\n\n[Install]\nWantedBy=timers.target" > $HOME/.config/systemd/user/unreal.timer
|
||||
echo -e "[Unit]\nDescription=UnrealIRCd Cron Check Service\n\n[Service]\nType=oneshot\nExecStart=$HOME/unrealircd/unrealircd croncheck" > $HOME/.config/systemd/user/unreal.service
|
||||
systemctl --user enable unreal.timer && systemctl --user start unreal.timer
|
||||
else
|
||||
echo "warning: cron/systemd not found on system! (reboot/restart timers not set)"
|
||||
fi
|
||||
$UNREAL/unrealircd spkifp | tail -n2 | head -1
|
||||
curl -4 icanhazip.com && curl -6 icanhazip.com
|
||||
elif [ $1 = 'source'; then
|
||||
;;
|
||||
|
||||
source)
|
||||
wget -O $SOURCE.tar.gz https://www.unrealircd.org/downloads/unrealircd-latest.tar.gz
|
||||
tar -xvf $SOURCE.tar.gz --one-top-level --strip-components=1 && rm $SOURCE.tar.gz
|
||||
sed -i 's/NICKNAMEHISTORYLENGTH="2000"/NICKNAMEHISTORYLENGTH="100"/g' $SOURCE/Config
|
||||
|
@ -46,7 +64,9 @@ if [ "$#" = '1' ]; then
|
|||
sed -i 's;//#undef FAKELAG_CONFIGURABLE;#define FAKELAG_CONFIGURABLE;g' $SOURCE/include/config.h
|
||||
rm $SOURCE/doc/conf/* && rm $SOURCE/doc/conf/aliases && rm $SOURCE/doc/conf/examples && rm $SOURCE/doc/conf/help
|
||||
cp $HOME/dev/git/supernets/unrealircd/doc/conf/* $SOURCE/doc/conf/
|
||||
elif [ $1 = 'update']; then
|
||||
;;
|
||||
|
||||
update)
|
||||
BACKUP=$UNREAL.backup
|
||||
mkdir $BACKUP && cp $UNREAL/conf/unrealircd.conf $BACKUP && cp $UNREAL/conf/tls/*.pem $BACKUP && cp $UNREAL/data/*.db $BACKUP
|
||||
git clone --depth 1 https://github.com/supernets/unrealircd.git $SOURCE
|
||||
|
@ -54,5 +74,5 @@ if [ "$#" = '1' ]; then
|
|||
cd $SOURCE && (echo -e "\n" | ./Config -nointro) && make && make install && cd $HOME && rm -rf $SOURCE
|
||||
rm $UNREAL/conf/*.conf && mv $BACKUP/unrealircd.conf $UNREAL/conf && mv $BACKUP/*.pem $UNREAL/conf/tls && mv $BACKUP/*.db $UNREAL/data && rm -r $BACKUP
|
||||
$UNREAL/unrealircd start &
|
||||
fi
|
||||
fi
|
||||
;;
|
||||
esac
|
|
@ -0,0 +1,11 @@
|
|||
#!/bin/sh
# Jitsi Meet quickstart installer
# https://jitsi.github.io/handbook/docs/devops-guide/devops-guide-quickstart/

# Add the Prosody package repository and install Lua.
sudo curl -sL https://prosody.im/files/prosody-debian-packages.key -o /etc/apt/keyrings/prosody-debian-packages.key
echo "deb [signed-by=/etc/apt/keyrings/prosody-debian-packages.key] http://packages.prosody.im/debian $(lsb_release -sc) main" | sudo tee /etc/apt/sources.list.d/prosody-debian-packages.list
# NOTE(review): there is no 'apt update' between adding the repo and this install,
# so lua5.2 comes from the pre-existing package lists — confirm that is intended.
sudo apt install lua5.2

# Add the Jitsi stable repository.
curl -sL https://download.jitsi.org/jitsi-key.gpg.key | sudo sh -c 'gpg --dearmor > /usr/share/keyrings/jitsi-keyring.gpg'
echo "deb [signed-by=/usr/share/keyrings/jitsi-keyring.gpg] https://download.jitsi.org stable/" | sudo tee /etc/apt/sources.list.d/jitsi-stable.list

# Refresh package lists and install Jitsi Meet.
sudo apt update
sudo apt install jitsi-meet
|
|
@ -0,0 +1,58 @@
|
|||
#!/bin/sh
# SuperNETs tool for Anope services - Developed by acidvegas (https://git.acid.vegas/supertools)
# requires cmake

ANOPE=$HOME/services
SOURCE=$HOME/services.source

# Bail out early if any required tool is missing.
for pkg in curl git jq make; do
	if ! command -v $pkg > /dev/null; then
		echo "error: missing required package '$pkg'"
		exit 1
	fi
done

case "$1" in
	check)
		# Compare the locally installed Anope version with the latest GitHub release tag.
		CURRENT=$($ANOPE/bin/services -v | cut -d' ' -f1 | cut -d'-' -f2)
		LATEST=$(curl -s https://api.github.com/repos/anope/anope/releases/latest | jq -r '.tag_name')
		if [ "$CURRENT" != "$LATEST" ]; then
			echo "new version available: $LATEST"
		fi
		;;

	deploy)
		# Fresh build from the SuperNETs fork, then wire up a 5-minute watchdog
		# via cron or a systemd user timer, whichever is available.
		git clone --depth 1 https://github.com/supernets/anope.git "$SOURCE"
		cd "$SOURCE" && ./Config -nointro -quick && cd build && make && make install && cd $HOME && rm -rf "$SOURCE"
		if command -v crontab > /dev/null; then
			(crontab -l; echo "*/5 * * * * $HOME/services/data/services.chk >/dev/null 2>&1") | crontab -
			(crontab -l; echo "@reboot $HOME/services/bin/services") | crontab -
		elif command -v systemctl > /dev/null; then
			printf "[Unit]\nDescription=Anope Check Timer\n\n[Timer]\nOnBootSec=1min\nOnUnitActiveSec=5min\n\n[Install]\nWantedBy=timers.target" > "$HOME/.config/systemd/user/anope.timer"
			printf "[Unit]\nDescription=Anope Check Service\n\n[Service]\nType=oneshot\nExecStart=$HOME/services/data/services.chk >/dev/null 2>&1" > "$HOME/.config/systemd/user/anope.service"
			systemctl --user enable anope.timer && systemctl --user start anope.timer
		else
			echo "warning: cron/systemd not found on system! (reboot/restart timers not set)"
		fi
		# Prompt for the redacted config values and substitute them in place.
		# NOTE(review): this quotes every value ("$VALUE"), but port/seed may be
		# unquoted in services.conf — confirm the template matches.
		# NOTE(review): 'read -p' is a bashism; plain POSIX sh (dash) lacks -p.
		for param in host port password seed; do
			read -p "$param = " VALUE
			sed -i "s/$param = \"REDACTED\"/$param = \"$VALUE\"/g" "$ANOPE/conf/services.conf"
		done
		$ANOPE/bin/services
		;;

	update)
		# Back up config + database, rebuild from source, then restore them.
		BACKUP="$ANOPE.backup"
		mkdir "$BACKUP" && cp "$ANOPE/conf/services.conf" "$BACKUP" && cp "$ANOPE/data/anope.db" "$BACKUP"
		pkill -9 services && rm -rf "$ANOPE"
		git clone --depth 1 https://github.com/supernets/anope.git "$SOURCE"
		cd "$SOURCE" && ./Config -nointro -quick && cd build && make && make install && cd $HOME && rm -rf "$SOURCE"
		mv "$BACKUP/services.conf" "$ANOPE/conf/"
		mv "$BACKUP/anope.db" "$ANOPE/data"
		$ANOPE/bin/services
		;;

	*)
		echo "Usage: $0 {check|deploy|update}"
		;;
esac
|
|
@ -0,0 +1,14 @@
|
|||
#!/bin/sh
# Configure a Tor hidden service for UnrealIRCd (unix-socket listeners).
apt-get install tor
# Write the hidden-service definition.
# NOTE(review): this overwrites the entire /etc/tor/torrc rather than appending — confirm intended.
{
	echo "HiddenServiceDir /var/lib/tor/ircd"
	echo "HiddenServicePort 6667 unix:/etc/tor/unrealircd/tor_ircd.socket"
	echo "HiddenServicePort 6697 unix:/etc/tor/unrealircd/tor_tls_ircd.socket"
	echo "#MapAddress irc.supernets.org changeme.onion"
} > /etc/tor/torrc
# Socket directory: owned by the ircd user, group-readable by tor.
mkdir /etc/tor/unrealircd
chown unrealircd:debian-tor /etc/tor/unrealircd
chmod 750 /etc/tor/unrealircd
systemctl restart tor.service && systemctl enable tor.service
# Print the generated .onion hostname so it can be filled into the MapAddress line.
cat /var/lib/tor/ircd/hostname
echo "MapAddress irc1.example.net xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx.onion" >> /etc/tor/torrc
|
|
@ -1,95 +0,0 @@
|
|||
#!/usr/bin/env python
|
||||
# Discord IRC Relay - Developed by acidvegas in Python (https://git.acid.vegas/tools)
|
||||
|
||||
import asyncio
|
||||
import ssl
|
||||
|
||||
class IRC:
    '''Minimal asyncio IRC client that relays #superbowl messages into the Discord queue.'''

    def __init__(self):
        # Connection options passed straight to asyncio.open_connection (family=2 = AF_INET/IPv4).
        self.options = {'host':'irc.supernets.org','port':6697,'limit':1024,'ssl':self.ssl_ctx(),'family':2,'local_addr':None}
        self.reader, self.writer = (None, None)

    def ssl_ctx(self):
        '''Return an SSL context that skips certificate verification (for self-signed IRCd certs).'''
        ctx = ssl.create_default_context()
        ctx.check_hostname = False
        ctx.verify_mode = ssl.CERT_NONE
        #ctx.load_cert_chain(config.cert.file, password=config.cert.password)
        return ctx

    def raw(self, data):
        # IRC lines are capped at 512 bytes including CR-LF, hence the 510-char truncation.
        self.writer.write(data[:510].encode('utf-8') + b'\r\n')

    async def connect(self):
        '''Connect, register with the server, then pump the read loop until EOF.'''
        try:
            self.reader, self.writer = await asyncio.open_connection(**self.options)
            self.raw(f'USER relay 0 * :Discord Relay Bot')
            self.raw('NICK DISCORD')
        except Exception as ex:
            print(f'[!] - Failed to connect to IRC server! ({ex!s})')
        else:
            while not self.reader.at_eof():
                try:
                    line = await self.reader.readline()
                    line = line.decode('utf-8').strip()
                    print('[IRC] ' + line)
                    args = line.split()
                    if args[0] == 'PING':
                        self.raw('PONG ' + args[1][1:])
                    elif args[1] == '001': #RPL_WELCOME
                        self.raw('MODE DISCORD +BD')
                        self.raw('PRIVMSG NickServ :IDENTIFY DISCORD REDACTED')
                        self.raw('JOIN #superbowl')
                    elif args[1] == 'PRIVMSG':
                        nick = args[0].split('!')[0][1:]
                        chan = args[2]
                        msg = ' '.join(args[3:])[1:]
                        if chan == '#superbowl' and nick not in ('DISCORD','EliManning','CANCER','scroll','DickServ','AMBERALERT'):
                            # Skip lines carrying mIRC formatting codes (bold/color/italic/underline/reverse).
                            if '\x02' not in msg and '\x03' not in msg and '\x1D' not in msg and '\x1F' not in msg and '\x16' not in msg:
                                # NOTE(review): 'DiscordBot' is not defined in this file — the Discord
                                # class defined later is named 'Discord'; this line would raise
                                # NameError as written. Confirm the intended reference.
                                DiscordBot.queue.append(f'{nick}: {msg}')
                except (UnicodeDecodeError, UnicodeEncodeError):
                    pass
                except Exception as ex:
                    print(f'[!] - Unknown error has occured! ({ex!s})')
|
||||
|
||||
Bot_IRC = IRC()
|
||||
|
||||
# ---------------------------------------------------------------------------------------------------- #
|
||||
|
||||
# Hard dependency check: exit with install instructions if discord.py is absent.
try:
    import discord
except ImportError:
    raise SystemExit('missing discord modules (https://pypi.org/project/discord.py/)')
|
||||
|
||||
class Discord(discord.Client):
    '''Discord side of the relay: forwards channel messages to IRC and drains
    the queue that the IRC side fills.'''

    server_id  = 'CHANGEME' # guild id string; rebound to a Guild object in on_ready
    channel_id = 'CHANGEME' # channel id string; rebound to a TextChannel in on_ready
    admin_id   = 'CHANGEME'
    queue      = list()     # lines appended by the IRC side, flushed by queue_loop

    async def queue_loop(self):
        '''Flush any queued IRC lines to the Discord channel, once per second.'''
        while True: # NOTE(review): original comment read "5 every 5 minutes?" -- rate is 1s
            if self.queue:
                # 2000 chars is Discord's message-length limit
                await self.channel_id.send('\n'.join(self.queue)[:2000])
                self.queue = list()
            await asyncio.sleep(1)

    async def on_message(self, message):
        '''Relay a Discord message to the IRC channel, ignoring our own traffic
        and messages from other channels.'''
        if message.author == self.user:
            return
        if message.channel != self.channel_id:
            return
        content = message.clean_content
        if message.attachments:
            content += ' ' + message.attachments[0].url
        print(f'[Discord] {message.author.name}: {content}')
        Bot_IRC.raw(f'PRIVMSG #superbowl :{message.author.name}: {content}')

    async def on_ready(self):
        '''Resolve the configured ids into live objects and start both relay tasks.'''
        print(f'[Discord] Client: {self.user.name} ({self.user.id!s})')
        self.server_id  = [guild for guild in self.guilds if str(guild.id) == self.server_id][0]
        self.channel_id = [chan for chan in self.server_id.channels if str(chan.id) == self.channel_id and chan.type == discord.ChannelType.text][0]
        print(f'[Discord] Server: {self.server_id} ({self.channel_id!s})')
        asyncio.create_task(self.queue_loop())
        asyncio.create_task(Bot_IRC.connect())

DiscordBot = Discord()
DiscordBot.run('CHANGEME')
|
113
hateserv/api.py
113
hateserv/api.py
|
@ -1,113 +0,0 @@
|
|||
#!/usr/bin/env python
|
||||
# hateserv irc bot - developed by acidvegas in python (https://git.acid.vegas/hateserv)
|
||||
|
||||
import http.client
|
||||
import json
|
||||
import re
|
||||
import urllib.parse
|
||||
|
||||
from googleapiclient.discovery import build # https://pypi.org/project/google-api-python-client/
|
||||
#import tweepy # https://pypi.org/project/tweepy/
|
||||
|
||||
# API Keys -- replace every 'redacted' placeholder with real credentials before use
google_api_key = 'redacted' # https://console.developers.google.com/
google_cse_id = 'redacted' # https://cse.google.com/
# Twitter credentials (currently unused; see the stubbed twitter() helper)
twitter_bearer_token = 'redacted'
twitter_access_token = 'redacted'
twitter_access_token_secret = 'redacted'
twitter_client_id = 'redacted'
twitter_client_secret = 'redacted'
twitter_api_key = 'redacted'
twitter_api_secret = 'redacted'
||||
|
||||
def between(source, start, stop):
    '''Return the first substring of *source* between the regex patterns *start*
    and *stop* (case-insensitive, multiline), or False when there is no match.'''
    match = re.search(start + '(.*?)' + stop, source, re.IGNORECASE | re.MULTILINE)
    return match.group(1) if match else False
|
||||
|
||||
def geturl(url, endpoint, headers={}):
    '''
    Perform an HTTPS GET against host *url* at *endpoint* (path + query string)
    and return the raw response body as bytes.

    url      -- hostname only, e.g. 'api.github.com'
    endpoint -- request target, e.g. '/repos/foo/bar'
    headers  -- optional request headers (never mutated, so the shared
                mutable default is harmless here)

    Fix: the connection is now closed in a finally block -- previously an
    exception from request()/getresponse() leaked the socket.
    '''
    conn = http.client.HTTPSConnection(url, timeout=15)
    try:
        conn.request('GET', endpoint, headers=headers)
        return conn.getresponse().read()
    finally:
        conn.close()
|
||||
|
||||
def google(query):
    '''Run a Google Custom Search for *query*; return up to 10 result items, or False.'''
    cse = build('customsearch', 'v1', developerKey=google_api_key, cache_discovery=False).cse()
    response = cse.list(q=query, cx=google_cse_id, num=10).execute()
    return response['items'] if response else False
|
||||
|
||||
def netsplit(option, query):
    '''
    Scrape netsplit.de for IRC search data.

    option  'channels' -> channel search results for *query* as a list of
                          {'channel','network','users','topic'} dicts
            'networks' -> list of network slugs netsplit.de tracks
                          (*query* is ignored on this branch)
    Returns False when nothing was found.
    '''
    if option == 'channels':
        data = str(geturl('netsplit.de', '/channels/?chat='+query))
        # Strip markup (incl. a zero-width space) that would leak into fields
        for i in ('​','<b>','</b>','<span style="color:#000000;">','<strong>','</strong>','<span class="cs-no-topic">'):
            data = data.replace(i, '')
        results = re.findall('<div class="cs-result">(.*?)</div>', data, re.IGNORECASE|re.MULTILINE)
        if results:
            channels = list()
            for item in results:
                channel = between(item, '<span class="cs-channel">', '</span>')
                network = between(item, '<span class="cs-network">', '</span>')
                users = between(item, ' - ', ' users - ')
                # Topic is optional on the results page
                if '<span class="cs-details">current topic:' in item:
                    topic = between(item, '<span class="cs-details">current topic: </span>', '<br>').replace('</span>','')
                else:
                    topic = 'no topic set for channel'
                channels.append({'channel':channel, 'network':network, 'users':users, 'topic':topic})
            return channels
        else:
            return False
    elif option == 'networks':
        data = str(geturl('netsplit.de', '/networks/'))
        results = re.findall('<a class="competitor" href="/networks/(.*?)/" title=', data, re.IGNORECASE|re.MULTILINE)
        return results if results else False
|
||||
|
||||
def reddit(option, subreddit, id=None):
    '''
    Fetch reddit data over the public JSON endpoints.

    option  'post'      -> data dict of post *id* in *subreddit*
    		'subreddit' -> up to 10 recent non-stickied posts from *subreddit*
    Returns False when the lookup fails or yields nothing.

    Fix: the 'subreddit' branch previously returned None on an empty result,
    inconsistent with every other helper in this module -- it now returns False.
    '''
    if option == 'post':
        data = json.loads(geturl('www.reddit.com', f'/r/{subreddit}/comments/{id}.json', headers={'Accept':'application/json','User-Agent':'HateServ/1.0'}))
        return data[0]['data']['children'][0]['data'] if 'error' not in data else False
    elif option == 'subreddit':
        data = json.loads(geturl('www.reddit.com', f'/r/{subreddit}.json?limit=20', headers={'Accept':'application/json','User-Agent':'HateServ/1.0'}))
        posts = [item['data'] for item in data['data']['children'] if not item['data']['stickied']]
        return posts[:10] if posts else False
|
||||
|
||||
def github(option, query):
    '''
    Query the GitHub REST API (v3).

    option  'search' -> up to 10 repository results matching *query*, or False
            'repo'   -> metadata dict for repository *query* ("owner/name")
            'user'   -> metadata dict for user *query*
    '''
    api_headers = {'Accept':'application/vnd.github.v3+json','User-Agent':'HateServ/1.0'}
    if option == 'search':
        found = json.loads(geturl('api.github.com', '/search/repositories?q='+query, headers=api_headers))
        return found['items'][:10] if found['items'] else False
    elif option == 'repo':
        return json.loads(geturl('api.github.com', '/repos/'+query, headers=api_headers))
    elif option == 'user':
        return json.loads(geturl('api.github.com', '/users/'+query, headers=api_headers))
|
||||
|
||||
def youtube(option, query, api_key):
    '''
    Query the YouTube Data API v3.

    option   'video'  -> summary dict (channel/description/likes/dislikes/title/views)
                         for the video id *query*
             'search' -> up to 10 video search results for *query*
    api_key  -- Google API key used for both branches
    Returns False when nothing was found.

    Fix: the 'video' branch previously called the undefined names ``httplib``,
    ``config`` and ``id`` and ignored *api_key*; it now uses this module's
    geturl() helper with the supplied key and *query* as the video id.
    '''
    if option == 'video':
        api = json.loads(geturl('www.googleapis.com', f'/youtube/v3/videos?key={api_key}&part=snippet,statistics&id={query}'))
        if api['items']:
            api = api['items'][0]
            data = {}
            data['channel'] = api['snippet']['channelTitle']
            data['description'] = ' '.join(api['snippet']['description'].split()) # collapse whitespace
            data['dislikes'] = api['statistics']['dislikeCount']
            data['likes'] = api['statistics']['likeCount']
            data['title'] = api['snippet']['title']
            data['views'] = api['statistics']['viewCount']
            return data
        else:
            return False
    elif option == 'search':
        service = build('youtube', 'v3', developerKey=api_key).search()
        results = service.list(part='id', type='video', q=query, maxResults=10).execute()
        return results['items'] if results else False
|
||||
|
||||
def twitter(data):
    '''Stub: posting *data* to Twitter is not implemented.
    The former tweepy-based code is kept below for reference.'''
    # auth = tweepy.OAuthHandler(twitter_consumer_key, twitter_consumer_secret)
    # auth.set_access_token(twitter_access_token, twitter_access_secret)
    # api = tweepy.API(auth)
    # api.update_status(data)
    pass
|
||||
|
||||
def unreal():
    '''Stub: UnrealIRCd management is not implemented yet.'''
    pass
|
||||
|
||||
def anope():
    '''Stub: Anope services management is not implemented yet.'''
    pass
|
|
@ -0,0 +1,110 @@
|
|||
#!/bin/sh
|
||||
# LXC Container Setup - developed by acidvegas (https://git.acid.vegas/supertools)
|
||||
|
||||
# Configuration
|
||||
SSH_PORT=60404            # port dropbear listens on (written to /etc/default/dropbear)
USER_NAME="supernets"     # unprivileged user that owns the containers
CONTAINER_NAME="ircd"     # name of the LXC container created in setup_user
|
||||
|
||||
# Host preparation (run as root): hardening, dropbear, NAT, and unprivileged-LXC plumbing.
setup_root() {
	# Secure DNS (TEMP) -- OpenDNS resolvers, then lock the file
	printf "nameserver 208.67.222.222\nnameserver 208.67.220.220\nnameserver 2620:119:35::35\nnameserver 2620:119:53::53\n" > /etc/resolv.conf
	chattr +i /etc/resolv.conf

	# Update & Install Packages
	apt-get update && apt-get upgrade
	apt-get install bridge-utils dirmngr htop gpg lxc man net-tools uidmap screen unattended-upgrades

	# Wipe the journal and only use RAM storage
	journalctl --vacuum-time=1d
	printf "[Journal]\nStorage=volatile\nSplitMode=none\nRuntimeMaxUse=500K\n" > /etc/systemd/journald.conf
	systemctl restart systemd-journald

	# Install & setup dropbear
	# Fix: the config was previously written inside single quotes, so the
	# literal text $SSH_PORT (and backslash-escaped quotes) ended up in
	# /etc/default/dropbear -- use a %s format argument so the port expands.
	apt-get install -y dropbear
	printf 'NO_START=0\nDROPBEAR_PORT=%s\nDROPBEAR_EXTRA_ARGS=\nDROPBEAR_BANNER=""\nDROPBEAR_ED25519KEY="/etc/dropbear/dropbear_ed25519_host_key"\nDROPBEAR_RECEIVE_WINDOW=65536\n' "$SSH_PORT" > /etc/default/dropbear
	systemctl restart dropbear && systemctl enable dropbear

	# Remove OpenSSH (dropbear replaces it)
	apt remove openssh-server && apt remove openssh-client
	apt purge openssh-server && apt purge openssh-client
	apt autoremove && apt autoclean
	systemctl stop ssh && systemctl disable ssh

	# Disable history, logs, & IPv6
	printf "\nHISTSIZE=0\nHISTFILESIZE=0\nunset HISTFILE\n" >> /etc/bash.bashrc
	>/var/log/lastlog && chattr +i /var/log/lastlog
	sed -i 's/GRUB_CMDLINE_LINUX=""/GRUB_CMDLINE_LINUX="ipv6.disable=1"/' /etc/default/grub && update-grub

	# Set locales
	echo "en_US.UTF-8 UTF-8" > /etc/locale.gen && locale-gen

	# Add a new user
	useradd -m -s /bin/bash $USER_NAME && passwd $USER_NAME

	# Change hostname (interactive)
	nano /etc/hostname

	# Enable user-level services so the user's containers outlive logout
	loginctl enable-linger $USER_NAME

	# Configure NAT for container traffic
	iptables -t nat -A POSTROUTING -o eth0 -j MASQUERADE
	echo "1" > /proc/sys/net/ipv4/ip_forward
	# NOTE(review): '>' truncates /etc/sysctl.conf -- confirm append was not intended
	printf "\nnet.ipv4.ip_forward=1\n" > /etc/sysctl.conf

	# Create a runtime directory with the correct permissions
	mkdir -p /run/user/$(id -u $USER_NAME)
	chown $USER_NAME:$USER_NAME /run/user/$(id -u $USER_NAME)
	chmod 700 /run/user/$(id -u $USER_NAME)

	# Set the subordinate UID/GID ranges for unprivileged containers
	echo "$USER_NAME:100000:65536" > /etc/subuid
	echo "$USER_NAME:100000:65536" > /etc/subgid

	# Create bridge (usually done automatically, see `ip addr` output for lxcbr0)
	#brctl addbr lxcbr0
	#ip addr add 192.168.1.10/24 dev lxcbr0
	#ip link set dev lxcbr0 up

	# Restart the LXC service
	systemctl restart lxc
}
|
||||
|
||||
# Per-user setup: SSH key, LXC config, runtime dir, user service, and the container itself.
setup_user() {
	# Add dropbear public key and lock the files down
	mkdir -p $HOME/.ssh
	printf "ssh-ed25519 AAAAC3NzaC1lZDI1NTE5AAAAIBgw6zK6LghGq/6fdMGFKfH9fr+qCOASlD+Xi7Zoe7Ps acidvegas@blackhole" > $HOME/.ssh/authorized_keys
	chmod 700 $HOME/.ssh
	chown -R $USER $HOME/.ssh
	chmod 400 $HOME/.ssh/authorized_keys
	chattr +i $HOME/.ssh
	chattr +i $HOME/.ssh/authorized_keys

	# Setup LXC configuration (id mapping + veth on lxcbr0, autostart)
	mkdir -p ~/.config/lxc
	printf "lxc.idmap = u 0 100000 65536\nlxc.idmap = g 0 100000 65536\nlxc.net.0.type = veth\nlxc.net.0.link = lxcbr0\nlxc.net.0.flags = up\nlxc.start.auto = 1\nlxc.start.delay = 5\n" > $HOME/.config/lxc/default.conf

	# Setup runtime directory (single quotes keep $(...) literal in .bashrc)
	echo 'export XDG_RUNTIME_DIR=/run/user/$(id -u $USER)' >> ~/.bashrc
	export XDG_RUNTIME_DIR=/run/user/$(id -u $USER)

	# Create a systemd user service template
	# Fix: %I / %i are systemd specifiers that must reach the unit file
	# literally, but bare % starts a printf directive -- escape as %%.
	mkdir -p $HOME/.config/systemd/user
	printf "[Unit]\nDescription=LXC Container %%I\nAfter=network.target\n\n[Service]\nType=forking\nExecStart=/usr/bin/lxc-start -n %%i\nExecStop=/usr/bin/lxc-stop -n %%i\nRestart=on-failure\n\n[Install]\nWantedBy=default.target\n" > $HOME/.config/systemd/user/lxc-container@.service

	# Create a container
	# Fix: the original used the undefined variable $container; the configured
	# name lives in $CONTAINER_NAME (set at the top of this script).
	lxc-create -n $CONTAINER_NAME -t download -- --dist debian --release bullseye --arch amd64

	# Start & enable the service
	systemctl --user enable lxc-container@${CONTAINER_NAME}.service
	systemctl --user start lxc-container@${CONTAINER_NAME}.service
}
|
||||
|
||||
# Placeholder: provisioning of services inside the container is still TODO.
setup_container() {
	return
}
|
||||
|
||||
#setup_root
|
||||
#setup_user
|
|
@ -0,0 +1,70 @@
|
|||
#!/bin/sh
# IRCd Firewall - Developed by acidvegas (https://git.acid.vegas/supertools)
# Locks an IRCd box down to: SSH from one admin IP, the hub link from one hub
# IP, and the public IRC/TLS ports. Everything else inbound is dropped.

# nano /etc/default/grub
# Add ipv6.disable=1 to GRUB_CMDLINE_LINUX_DEFAULT then run update-grub

# Configuration
IP_MAIN="10.0.0.1" # Change this to your IP
IP_HUB="10.0.0.2" # Change this to your hub IP
PORT_SSH=22 # Default 22
PORT_HUB=5900 # Default 5900

# Kernel hardening settings (anti-spoofing, no redirects, ASLR, no suid cores)
mkdir -p /etc/sysctl.d
{
printf "net.ipv4.conf.all.accept_source_route = 0\n"
printf "net.ipv6.conf.all.accept_source_route = 0\n"
printf "net.ipv4.conf.all.rp_filter = 1\n"
printf "net.ipv4.conf.default.rp_filter = 1\n"
printf "net.ipv4.conf.all.accept_redirects = 0\n"
printf "net.ipv6.conf.all.accept_redirects = 0\n"
printf "net.ipv4.conf.default.accept_redirects = 0\n"
printf "net.ipv6.conf.default.accept_redirects = 0\n"
printf "net.ipv4.conf.all.log_martians = 1\n"
printf "kernel.randomize_va_space = 2\n"
printf "fs.suid_dumpable = 0\n"
} > /etc/sysctl.d/99-custom-hardening.conf

# Apply hardening settings
sysctl -p /etc/sysctl.d/99-custom-hardening.conf

# Flush existing rules
iptables -F
iptables -X
iptables -t nat -F
iptables -t nat -X
iptables -t mangle -F
iptables -t mangle -X

# Default chain policies: drop everything inbound/forwarded, allow outbound
iptables -P INPUT DROP
iptables -P FORWARD DROP
iptables -P OUTPUT ACCEPT

# Common Firewall rules
iptables -A INPUT -m conntrack --ctstate ESTABLISHED,RELATED -j ACCEPT
# explicit ping drop is redundant under the DROP policy, but keeps counters visible
iptables -A INPUT -p icmp --icmp-type echo-request -j DROP
iptables -A INPUT -i lo -j ACCEPT

# Allow SSH (admin IP only)
iptables -A INPUT -p tcp -s $IP_MAIN --dport $PORT_SSH -j ACCEPT

# Allow IRCd Hub (hub IP only)
iptables -A INPUT -p tcp -s $IP_HUB --dport $PORT_HUB -j ACCEPT

# Allow IRCd Ports (plaintext)
iptables -A INPUT -p tcp --dport 6660:6669 -j ACCEPT
iptables -A INPUT -p tcp --dport 7000 -j ACCEPT

# Allow IRCd TLS Ports
iptables -A INPUT -p tcp --dport 6697 -j ACCEPT
iptables -A INPUT -p tcp --dport 9999 -j ACCEPT

# Save rules so they survive a reboot
apt-get install -y iptables-persistent
netfilter-persistent save
systemctl enable netfilter-persistent && systemctl start netfilter-persistent

# Show rules
iptables -L -v -n
|
|
@ -1,9 +1,6 @@
|
|||
#!/usr/bin/env python
|
||||
# supernets namecheap api tool - developed by acidvegas in python (https://git.acid.vegas/supertools)
|
||||
|
||||
''' tool for automatically renewing positivessl certificates '''
|
||||
''' this is still a work in progress...good thing i have a year to finish '''
|
||||
|
||||
import re
|
||||
import requests
|
||||
import xml.etree.ElementTree as et
|
||||
|
@ -35,11 +32,11 @@ class domains:
|
|||
|
||||
def setHosts(type, address):
    '''
    Point the 'irc' host record of supernets.org at *address* via
    namecheap.domains.dns.setHosts.

    type    -- DNS record type, presumably 'A'/'AAAA' -- TODO confirm with api()
    address -- address the record should resolve to
    Returns the api() response so callers can inspect the result
    (previously it was dropped into an unused local).
    '''
    payload = {
        'SLD'        : 'supernets',
        'TLD'        : 'org',
        'HostName'   : 'irc',
        'RecordType' : type,
        'Address'    : address,
        'TTL'        : '60'
    }
    return api('namecheap.domains.dns.setHosts', payload)
|
||||
|
@ -72,7 +69,7 @@ class ssl:
|
|||
def renew(id):
|
||||
'''https://www.namecheap.com/support/api/methods/ssl/renew/'''
|
||||
payload = {
|
||||
'CertificateID':id.
|
||||
'CertificateID':id,
|
||||
'SSLType': 'PositiveSSL',
|
||||
'years': '1' # or 5
|
||||
}
|
Loading…
Reference in New Issue