import re

import requests

WORD_LIST_URL = "https://raw.githubusercontent.com/dwyl/english-words/master/words.txt"


def tokenize_url(url):
    """Split a URL into alphanumeric tokens, expanding dotted hostnames."""
    # Keep dots inside tokens so hostnames like "www.example.com" survive as a
    # single token. The original pattern [a-zA-Z0-9]+ stripped dots, which made
    # the dot-splitting branch below unreachable dead code.
    tokens = re.findall(r'[a-zA-Z0-9.]+', url)
    new_tokens = []
    for token in tokens:
        new_tokens.append(token)
        if "." in token:
            dot_split = token.split(".")
            # Drop uninformative hostname parts before adding the pieces.
            if "com" in dot_split:
                dot_split.remove("com")
            if "www" in dot_split:
                dot_split.remove("www")
            # Skip empty strings left by leading/trailing dots.
            new_tokens.extend(part for part in dot_split if part)
    return new_tokens


def check_dictionary(words):
    """Return a 0/1 flag per token indicating English-dictionary membership."""
    response = requests.get(WORD_LIST_URL, timeout=10)
    response.raise_for_status()
    # Normalize to lowercase and use a set for O(1) lookups; the original
    # scanned a raw list of lines for every token, which is O(n) per word.
    english_words = {line.strip().lower() for line in response.text.splitlines()}
    return [int(word.lower() in english_words) for word in words]


url = input("Enter a URL: ")
tokens = tokenize_url(url)
words_in_dictionary = check_dictionary(tokens)
print("Tokenized URL:", tokens)
print("Words in English Dictionary:", words_in_dictionary)
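# Example session (a sketch of what a run looks like; the exact 0/1 flags
# depend on the contents of the dwyl word list at fetch time, so treat the
# last line below as illustrative rather than guaranteed):
#
#   Enter a URL: https://www.example.com/login
#   Tokenized URL: ['https', 'www.example.com', 'example', 'login']
#   Words in English Dictionary: [0, 0, 1, 1]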