diff --git a/.install/loveland.sh b/.install/loveland.sh index a18e23c..1c41edb 100755 --- a/.install/loveland.sh +++ b/.install/loveland.sh @@ -1,7 +1,7 @@ #!/bin/bash ######################################################################## # Author: Fred (support@qo-op.com) -# Version: 2020.03.24 +# Version: 0.3 # License: AGPL-3.0 (https://choosealicense.com/licenses/agpl-3.0/) ######################################################################## { @@ -102,7 +102,7 @@ sudo rm -f /etc/nginx/sites-enabled/default sudo systemctl restart nginx || err=1 - +# KILL RUNNING OASIS kill -9 $(ps auxf --sort=+utime | grep -w oasis | grep -v -E 'color=auto|grep' | tail -n 1 | awk '{print $2}') echo "REstarting OASIS with good $nodename & network config" [[ ! $(which nslookup) ]] && sudo apt-get install lolcat dnsutils -y diff --git a/www/LOVELand/img/content_not_available.gif b/www/LOVELand/img/content_not_available.gif new file mode 100644 index 0000000..87dd7c8 Binary files /dev/null and b/www/LOVELand/img/content_not_available.gif differ diff --git a/zen/tools/nodename_find.sh b/zen/tools/nodename_find.sh new file mode 100755 index 0000000..51ee770 --- /dev/null +++ b/zen/tools/nodename_find.sh @@ -0,0 +1,34 @@ +#!/bin/bash +######################################################################## +# Author: Fred (support@qo-op.com) +# Version: 0.3 +# License: AGPL-3.0 (https://choosealicense.com/licenses/agpl-3.0/) +######################################################################## +{ +MY_PATH="`dirname \"$0\"`" # relative +MY_PATH="`( cd \"$MY_PATH\" && pwd )`" # absolutized and normalized +ME="${0##*/}" + +[[ ! $(which nslookup) ]] && sudo apt-get install dnsutils -y +[[ ! 
$(which lolcat) ]] && sudo apt-get install lolcat -y + +echo ' + __ ___ _ ___ + / |/ /_ __ ____ ____ _____ ___ ___ (_)___/__ \ + / /|_/ / / / / / __ \/ __ `/ __ `__ \/ _ \ / / ___// _/ + / / / / /_/ / / / / / /_/ / / / / / / __/ / (__ )/_/ +/_/ /_/\__, / /_/ /_/\__,_/_/ /_/ /_/\___/ /_/____/(_) + /____/ +' | lolcat + +myip=$(hostname -I | awk '{print $1}') +# Check if IP is from reserved LAN addresses +isLAN=$(echo $myip | grep -E "(^127\.)|(^192\.168\.)|(^10\.)|(^172\.1[6-9]\.)|(^172\.2[0-9]\.)|(^172\.3[0-1]\.)|(^::1$)|(^[fF][cCdD])") +# Find 1st route gateway +myRouter=$(sudo route -n | head -n 3 | tail -n 1 | awk '{print $2}') +# Ask to the router its name (BOX DNS or system defined) +[[ $isLAN ]] && nodename=$(sudo nslookup $myip $myRouter | head -n 1 | awk -F ' = ' '{print $2}' | sed 's/\.[^.]*$//') \ +|| nodename=$(sudo nslookup $myip | head -n 1 | awk -F ' = ' '{print $2}' | sed 's/\.[^.]*$//') +echo $nodename + +} diff --git a/zen/tools/scraping/transiscope/explore_transiscope.sh b/zen/tools/scraping/transiscope/explore_transiscope.sh deleted file mode 100755 index aaa7caa..0000000 --- a/zen/tools/scraping/transiscope/explore_transiscope.sh +++ /dev/null @@ -1,9 +0,0 @@ -#!/bin/bash -if [[ ! -f transiscope.json ]]; then - echo "Premier lancement, récupération des données, veuillez patientez ..." 
- ./generate_transiscope.sh -fi - -cat transiscope.json | jq '.[] | .name, .abstract, .geo' - -exit 0 diff --git a/zen/tools/scraping/transiscope/generate_transiscope.sh b/zen/tools/scraping/transiscope/generate_transiscope.sh deleted file mode 100755 index 7be1093..0000000 --- a/zen/tools/scraping/transiscope/generate_transiscope.sh +++ /dev/null @@ -1,7 +0,0 @@ -#!/bin/bash - -curl -s https://transiscope.gogocarto.fr/api/elements | jq .data > /tmp/tmp_transiscope.json || exit 1 -[[ -f transiscope.json ]] && rm transiscope.json -mv /tmp/tmp_transiscope.json transiscope.json - -exit 0 diff --git a/zen/tools/scraping/transiscope/tests_scrap/scrap.py b/zen/tools/scraping/transiscope/tests_scrap/scrap.py deleted file mode 100755 index 34e5413..0000000 --- a/zen/tools/scraping/transiscope/tests_scrap/scrap.py +++ /dev/null @@ -1,9 +0,0 @@ -#!/usr/bin/python3 - -import cloudscraper - -url = "https://transiscope.org/carte-des-alternatives/#/carte/@46.33,-1.34,6z?cat=all" - -scraper = cloudscraper.create_scraper() -#scraper = cloudscraper.CloudScraper() # CloudScraper inherits from requests.Session -print(scraper.get(url).content) diff --git a/zen/tools/scraping/transiscope/tests_scrap/scrap3.py b/zen/tools/scraping/transiscope/tests_scrap/scrap3.py deleted file mode 100755 index 39d80e6..0000000 --- a/zen/tools/scraping/transiscope/tests_scrap/scrap3.py +++ /dev/null @@ -1,15 +0,0 @@ -#!/usr/bin/python3 - - -import requests -from parsel import Selector - - -#url = 'https://transiscope.org/carte-des-alternatives/#/carte/@46.33,-1.34,6z?cat=all' -url = 'https://www.kurzy.cz/banky/bankomaty/zatec-okres-louny/' -r = requests.get(url) -sel = Selector(r.text) -all_address = sel.xpath('//script[contains(.,"point_list")]').re_first(r'point_list = \[(.*)\]\];') - -for item in all_address.split(','): - print(item) diff --git a/zen/tools/scraping/transiscope/tests_scrap/scrapsoup.py b/zen/tools/scraping/transiscope/tests_scrap/scrapsoup.py deleted file mode 100755 index 
f83ea41..0000000 --- a/zen/tools/scraping/transiscope/tests_scrap/scrapsoup.py +++ /dev/null @@ -1,18 +0,0 @@ -#!/usr/bin/python3 - -from bs4 import BeautifulSoup -import urllib.request -import csv - -urlpage = 'https://transiscope.org/carte-des-alternatives/#/carte/@46.33,-1.34,6z?cat=all' - - -# query the website and return the html to the variable 'page' -page = urllib.request.urlopen(urlpage) -# parse the html using beautiful soup and store in variable 'soup' -soup = BeautifulSoup(page, 'html.parser') - - -table = soup.find(attrs={'id': 'element-info'}) -results = table.find_all('li') -print('Number of results', len(results))