forked from axiom-team/astroport
loveland find nodename clean
This commit is contained in:
parent
8d6c62fbcf
commit
a94c895cb0
|
@ -1,7 +1,7 @@
|
|||
#!/bin/bash
|
||||
########################################################################
|
||||
# Author: Fred (support@qo-op.com)
|
||||
# Version: 2020.03.24
|
||||
# Version: 0.3
|
||||
# License: AGPL-3.0 (https://choosealicense.com/licenses/agpl-3.0/)
|
||||
########################################################################
|
||||
{
|
||||
|
@ -102,7 +102,7 @@ sudo rm -f /etc/nginx/sites-enabled/default
|
|||
sudo systemctl restart nginx || err=1
|
||||
|
||||
|
||||
|
||||
# KILL RUNNING OASIS
|
||||
kill -9 $(ps auxf --sort=+utime | grep -w oasis | grep -v -E 'color=auto|grep' | tail -n 1 | awk '{print $2}')
|
||||
echo "REstarting OASIS with good $nodename & network config"
|
||||
[[ ! $(which nslookup) ]] && sudo apt-get install lolcat dnsutils -y
|
||||
|
|
Binary file not shown.
After Width: | Height: | Size: 234 KiB |
|
@ -0,0 +1,34 @@
|
|||
#!/bin/bash
########################################################################
# Author: Fred (support@qo-op.com)
# Version: 0.3
# License: AGPL-3.0 (https://choosealicense.com/licenses/agpl-3.0/)
########################################################################
# Detect this host's "nodename" by reverse-resolving its primary IP:
# against the LAN router's DNS for private addresses, or against the
# system resolver for public ones. Prints the resolved name.
{
MY_PATH="`dirname \"$0\"`" # relative
MY_PATH="`( cd \"$MY_PATH\" && pwd )`" # absolutized and normalized
ME="${0##*/}"

# Install the tools this script relies on if they are missing.
[[ ! $(which nslookup) ]] && sudo apt-get install dnsutils -y
[[ ! $(which lolcat) ]] && sudo apt-get install lolcat -y

echo '
    __  ___                               _ ___
   /  |/  /_ __  ____  ____ _____ ___  ___  (_)___/__ \
  / /|_/ / / / / / __ \/ __ `/ __ `__ \/ _ \ / / ___// _/
 / / / / /_/ / / / / / /_/ / / / / / / __/ / (__  )/_/
/_/ /_/\__, / /_/ /_/\__,_/_/ /_/ /_/\___/ /_/____/(_)
      /____/
' | lolcat

# Primary IPv4 address of this host.
myip=$(hostname -I | awk '{print $1}')

# Check if IP is from reserved LAN addresses.
# FIX: the original pattern was wrapped in JavaScript-style /.../
# delimiters; grep -E treats those slashes literally, so the first
# alternative ("/(^127\.)") and the last ("(^[fF][cCdD])/") could
# never match and loopback/ULA addresses were misclassified as WAN.
isLAN=$(echo $myip | grep -E "(^127\.)|(^192\.168\.)|(^10\.)|(^172\.1[6-9]\.)|(^172\.2[0-9]\.)|(^172\.3[0-1]\.)|(^::1$)|(^[fF][cCdD])")

# Find 1st route gateway (assumed to be the LAN router).
myRouter=$(sudo route -n | head -n 3 | tail -n 1 | awk '{print $2}')

# Ask the router (LAN) or the system resolver (WAN) for our PTR record,
# then strip the trailing domain component to keep the bare host name.
[[ $isLAN ]] && nodename=$(sudo nslookup $myip $myRouter | head -n 1 | awk -F ' = ' '{print $2}' | sed 's/\.[^.]*$//') \
|| nodename=$(sudo nslookup $myip | head -n 1 | awk -F ' = ' '{print $2}' | sed 's/\.[^.]*$//')
echo $nodename

}
|
|
@ -1,9 +0,0 @@
|
|||
#!/bin/bash
# Print the name, abstract and coordinates of every Transiscope entry,
# fetching the dataset first when it is not present locally.

if [[ ! -f transiscope.json ]]; then
    echo "Premier lancement, récupération des données, veuillez patientez ..."
    ./generate_transiscope.sh
fi

# Extract the three fields of interest from each element.
jq '.[] | .name, .abstract, .geo' < transiscope.json

exit 0
|
|
@ -1,7 +0,0 @@
|
|||
#!/bin/bash
# Refresh the local Transiscope dataset from the public gogocarto API.
# The download goes to a private temp file first so a failed fetch never
# clobbers an existing good transiscope.json.

# FIX: use mktemp instead of the fixed, predictable path
# /tmp/tmp_transiscope.json (avoids collisions between concurrent runs
# and symlink attacks on a world-writable directory).
tmpfile=$(mktemp) || exit 1

curl -s https://transiscope.gogocarto.fr/api/elements | jq .data > "$tmpfile" || { rm -f "$tmpfile"; exit 1; }

# mv overwrites the destination, so the old explicit rm is not needed.
mv "$tmpfile" transiscope.json

exit 0
|
|
@ -1,9 +0,0 @@
|
|||
#!/usr/bin/python3
"""Dump the raw HTML of the Transiscope map page to stdout.

cloudscraper solves Cloudflare's anti-bot challenge transparently;
create_scraper() behaves like a requests.Session.
"""

import cloudscraper

url = "https://transiscope.org/carte-des-alternatives/#/carte/@46.33,-1.34,6z?cat=all"

session = cloudscraper.create_scraper()
response = session.get(url)
print(response.content)
|
|
@ -1,15 +0,0 @@
|
|||
#!/usr/bin/python3
"""Scrape a "point_list" array embedded in an inline <script> tag and
print its comma-separated items, one per line.

NOTE(review): the Transiscope URL is commented out and a kurzy.cz page
is scraped instead -- presumably a prototype for extracting map points;
confirm which target is intended.
"""

import sys

import requests
from parsel import Selector

#url = 'https://transiscope.org/carte-des-alternatives/#/carte/@46.33,-1.34,6z?cat=all'
url = 'https://www.kurzy.cz/banky/bankomaty/zatec-okres-louny/'

r = requests.get(url)
sel = Selector(r.text)
# Pull the "point_list = [[...]];" literal out of the page's inline JS.
all_address = sel.xpath('//script[contains(.,"point_list")]').re_first(r'point_list = \[(.*)\]\];')

# FIX: re_first() returns None when the pattern is absent; the original
# then crashed with AttributeError on .split(). Exit cleanly instead.
if all_address is None:
    sys.exit("point_list not found in page")

for item in all_address.split(','):
    print(item)
|
|
@ -1,18 +0,0 @@
|
|||
#!/usr/bin/python3
"""Count the <li> items inside the '#element-info' element of the
Transiscope map page.

NOTE(review): everything after '#' in the URL is a fragment and is never
sent to the server -- the SPA renders it client-side, so the static HTML
returned to urlopen() may not contain '#element-info' at all; confirm.
(Unused 'import csv' from the original was removed.)
"""

import urllib.request

from bs4 import BeautifulSoup

urlpage = 'https://transiscope.org/carte-des-alternatives/#/carte/@46.33,-1.34,6z?cat=all'

# query the website and return the html to the variable 'page'
page = urllib.request.urlopen(urlpage)
# parse the html using beautiful soup and store in variable 'soup'
soup = BeautifulSoup(page, 'html.parser')

# FIX: soup.find() returns None when the element is missing; the
# original then crashed with AttributeError on .find_all().
table = soup.find(attrs={'id': 'element-info'})
if table is None:
    print('Number of results', 0)
else:
    results = table.find_all('li')
    print('Number of results', len(results))
|
Loading…
Reference in New Issue