import base64
import re

import gradio as gr
import requests
from bs4 import BeautifulSoup


def get_blocked_urls():
    """Scrape Colab's frontend JS bundle for its embedded "webui" URL list.

    Fetches the Colab landing page, locates the ``external_polymer_binary``
    script bundle, and extracts the ';'-separated quoted string that
    contains "webui". Entries that turn out to be (possibly nested)
    base64 are annotated in place.

    Returns:
        list: URL entries. Base64 entries are rewritten as
            ``"<raw> < <decoded> x<layers>>[thisisb64]"`` so the caller can
            render both forms. On any failure a one-element list holding an
            error message is returned instead.
    """
    url = 'https://colab.research.google.com/'
    r = requests.get(url, timeout=30)  # timeout so the UI can't hang forever
    if r.status_code != 200:
        # BUG FIX: original did "res code: " + r.status_code, which raises
        # TypeError (str + int) on exactly the branch meant to report errors.
        return [f"res code: {r.status_code}"]

    result = []
    soup = BeautifulSoup(r.text, 'html.parser')
    # The list lives inside the "external_polymer_binary" script bundle.
    for script in soup.find_all('script'):
        if "external_polymer_binary" not in str(script):
            continue
        r_js = requests.get(script['src'], timeout=30)
        # Quoted string containing "webui" holds the ';'-joined entries.
        match = re.search(r"'(.*?)webui(.*?)'", r_js.text)
        if match is None:
            # BUG FIX: original dereferenced match.group(0) unconditionally
            # and crashed with AttributeError when the bundle layout changed.
            continue
        # Trim the surrounding quotes, then split into individual entries.
        result = match.group(0)[1:-1].split(';')
        for i, entry in enumerate(result):
            decoded = entry
            repeats = 0
            # Some entries are base64-encoded several times over; peel up to
            # 10 layers. The oversized '=' padding makes b64decode tolerant
            # of any input length (a2b_base64 ignores data after the pad).
            try:
                for _ in range(10):
                    decoded = base64.b64decode(
                        decoded + "=" * 64
                    ).decode('utf-8')
                    repeats += 1
            except Exception:
                # Expected terminator: the innermost layer is not valid
                # base64/UTF-8. Narrowed from a bare except.
                pass
            if decoded != entry:
                # Tag the entry so handle_refresh() can render both forms.
                result[i] = f"{entry} < {decoded} x{repeats}>[thisisb64]"
    return result if result else ["failed :<"]


def handle_refresh():
    """Render the blocked-URL list as an HTML ordered list.

    Returns:
        str: An ``<ol>`` fragment with one ``<li>`` per URL. Entries tagged
        ``[thisisb64]`` by get_blocked_urls() also show their decoded form.
    """
    # NOTE(review): the original markup strings were corrupted in transit
    # (tags stripped); the <ol>/<li> structure below reconstructs the
    # visible numbered-list residue — confirm against the deployed app.
    xs = "<ol>"
    for url in get_blocked_urls():
        if "[thisisb64]" in url:
            url = url.replace("[thisisb64]", "")
            # Split "raw < decoded xN>" back into its two halves.
            nondecoded, _, decoded = url.partition('<')
            decoded = f"<{decoded}"
            # Escape angle brackets so the decoded payload renders as text
            # instead of being interpreted as markup.
            decoded = decoded.replace('<', '&lt;').replace('>', '&gt;')
            xs += f"<li>{nondecoded}<br><code>{decoded}</code></li>"
        else:
            xs += f"<li>{url}</li>"
    xs += "</ol>"
    return xs


with gr.Blocks(
    analytics_enabled=False,
    title="GGL Checks",
    theme="NoCrypt/miku",
) as demo:
    gr.HTML("""<h1>GGL Checks</h1>""")
    refresh = gr.Button("Refresh", variant="primary")
    html = gr.HTML()
    # Clicking Refresh re-scrapes and re-renders the list into the HTML pane.
    refresh.click(handle_refresh, outputs=[html])

demo.launch(debug=True)