import requests
from bs4 import BeautifulSoup
import time
import urllib3
urllib3.disable_warnings(urllib3.exceptions.InsecureRequestWarning)
# Fetch a URL, report its HTTP status, and return the text of the first <h1>
# tag (used as the page version), or None on a non-200 response or an error.
def get_page_version(url):
    try:
        # verify=False skips SSL certificate checks; timeout keeps a hung server from blocking the loop
        response = requests.get(url, verify=False, timeout=10)
        if response.status_code != 200:
            print(f"Server {url} responded with unexpected status code: {response.status_code}")
            return None
        else:
            print(f"Server {url} is up and running and responded with status code: {response.status_code}")
            soup = BeautifulSoup(response.text, 'html.parser')
            h1_tag = soup.find('h1')
            return h1_tag.text if h1_tag else None
    except Exception as e:
        print(f"Failed to connect to {url}")
        print(f"Error: {str(e)}")
        return None
# main() polls each endpoint, compares the <h1> version against the last value
# seen, and reports any change; add more endpoints to the list below if needed.
def main():
    urls = [
        "http://ec2-3-15-25-206.us-east-2.compute.amazonaws.com",
        "http://ec2-3-21-231-216.us-east-2.compute.amazonaws.com"
    ]
    versions = {url: None for url in urls}
    # time interval between check rounds
    interval = 5 * 60  # 5 minutes
    while True:
        for url in urls:
            new_version = get_page_version(url)
            old_version = versions[url]
            if new_version != old_version:
                print(f"Version changed for {url} from {old_version} to {new_version}")
                versions[url] = new_version
        time.sleep(interval)

if __name__ == "__main__":
    main()
Run with `python3 check.py` on 161.35.223.47 (a test server).
The script uses the BeautifulSoup and requests libraries, saves the version number it finds at each of our two URL endpoints on every run, and prints the version number and the server's response status when the version differs from the last check.
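As a quick illustration of the parsing step, here is a minimal sketch assuming the endpoints serve HTML with the version in the first h1 tag (the markup and version string below are made up for the example); the script simply takes the text of that tag as the page version:

from bs4 import BeautifulSoup

# Hypothetical HTML the endpoints might serve; the first <h1> holds the version.
sample_html = "<html><body><h1>v1.4.2</h1></body></html>"

soup = BeautifulSoup(sample_html, "html.parser")
h1_tag = soup.find("h1")
print(h1_tag.text if h1_tag else None)  # prints: v1.4.2

If a page has no h1 tag, find() returns None and the script records the version as None, which is why get_page_version() guards the .text access.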