import json
import socket
import urllib.error
import urllib.parse
import urllib.request

import gradio as gr
import requests


def check_domain(domain):
    """Check whether a domain resolves and answers an HTTPS request.

    Args:
        domain (str): Bare domain name to check (e.g. 'example.com').

    Returns:
        bool: True if the domain resolves via DNS and an HTTPS GET to it
        completes; False on DNS failure, connection error, or timeout.
    """
    try:
        # Resolve first; raises socket.gaierror (an OSError) on DNS failure.
        print(socket.getaddrinfo(domain, 443))
        resp = requests.get("https://" + domain, timeout=5)
        print(resp.status_code, resp.text[:200])
    except (requests.RequestException, OSError) as e:
        # The original let these propagate, contradicting the documented
        # bool contract; report failure instead of crashing the tool.
        print("Domain check failed:", e)
        return False
    return True


def ping_website(url):
    """Ping a website to check if it's reachable.

    Args:
        url (str): The URL of the website to ping (e.g. 'https://example.com').

    Returns:
        dict: A dictionary with the status of the ping. On success:
        {"status": "reachable", "status_code": int}; on failure an
        {"error": ..., ...} dict describing what went wrong.
    """
    if not url:
        return {"error": "missing url"}
    print("Pinging URL:", url)
    try:
        # Request() itself raises ValueError for malformed / unknown URL
        # schemes, so construction must live inside the try block too
        # (the original built it outside and crashed on bad input).
        req = urllib.request.Request(
            url,
            headers={
                "User-Agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/142.0.0.0 Safari/537.36"
            },
        )
        with urllib.request.urlopen(req, timeout=10) as resp:
            status_code = resp.getcode()
            return {"status": "reachable", "status_code": status_code}
    except urllib.error.HTTPError as e:
        print("HTTP error:", e)
        return {"error": "http_error", "status": e.code, "reason": e.reason}
    except urllib.error.URLError as e:
        print("URL error:", e)
        return {"error": "url_error", "reason": str(e)}
    except Exception as e:
        print("Unexpected error:", e)
        return {"error": "unexpected_error", "reason": str(e)}


def analyze_favicon(url):
    """Fetch favicon analysis JSON from realfavicongenerator.net and return it.

    Args:
        url (str): The URL to analyze (e.g. 'https://example.com').

    Returns:
        dict: Parsed JSON response from the external API, or an error dict on
        failure ("http_error", "url_error", "invalid_json", or
        "unexpected_error").
    """
    if not url:
        return {"error": "missing url"}
    print("Analyzing favicon for URL:", url)
    api_base = "https://realfavicongenerator.net/api/v2/favicons/analysis"
    query = urllib.parse.urlencode({"url": url})
    full_url = f"{api_base}?{query}"
    req = urllib.request.Request(full_url, headers={"User-Agent": "rfg-mcp-server/1.0"})
    try:
        with urllib.request.urlopen(req, timeout=10) as resp:
            raw = resp.read()
            # Attempt to decode and parse JSON.
            try:
                return json.loads(raw.decode("utf-8"))
            except Exception as ex:
                # Print the parsing error and include it in the returned dict.
                print("JSON parse error:", ex)
                # If parsing fails, return raw text under a key.
                try:
                    text = raw.decode("utf-8", errors="replace")
                except Exception:
                    text = ""
                return {"error": "invalid_json", "parse_error": str(ex), "raw": text}
    except urllib.error.HTTPError as e:
        # Include the response body (if readable) so callers can see the
        # API's own error message.
        print("HTTP/Api error:", e)
        try:
            body = e.read().decode("utf-8", errors="replace")
        except Exception:
            body = None
        return {"error": "http_error", "status": e.code, "reason": e.reason, "body": body}
    except urllib.error.URLError as e:
        return {"error": "url_error", "reason": str(e)}
    except Exception as e:
        return {"error": "unexpected_error", "reason": str(e)}


analyze_favicon_interface = gr.Interface(
    fn=analyze_favicon,
    inputs=["textbox"],
    outputs=gr.Textbox(lines=15),
    title="Analyze Favicon Tool",
)

ping_website_interface = gr.Interface(
    fn=ping_website,
    inputs=["textbox"],
    outputs="textbox",
    title="Ping Website",
)

check_domain_interface = gr.Interface(
    fn=check_domain,
    inputs=["textbox"],
    outputs="checkbox",
    title="Check Domain Tool",
)

server = gr.TabbedInterface(
    [analyze_favicon_interface, ping_website_interface, check_domain_interface],
    tab_names=["Analyze Favicon", "Ping Website", "Check Domain"],
)

if __name__ == "__main__":
    server.launch(mcp_server=True)