"""Scrape Darknet Diaries episode transcripts plus metadata into per-episode text files.

For each episode 1..138: fetch the HTML transcript page, extract the <pre> block,
fetch the episode metadata JSON from the API, and write
"<title>\n<downloads>\n<transcript>" to transcripts/episode_<number>.txt.
"""
import json
import os

import requests
from bs4 import BeautifulSoup

folder_path = "transcripts"
if not os.path.exists(folder_path):
    os.makedirs(folder_path)

for i in range(1, 139):
    try:
        # Fetch the transcript page; timeout prevents a stalled socket from
        # hanging the whole scrape.
        transcript_url = f"https://darknetdiaries.com/transcript/{i}"
        transcript_resp = requests.get(transcript_url, timeout=30)
        pre = BeautifulSoup(transcript_resp.text, 'html.parser').find('pre')
        if pre is None:
            # Page layout changed or episode missing — fail this episode
            # with a clear reason rather than an AttributeError.
            raise ValueError("no <pre> transcript element on page")
        transcript = pre.get_text()

        # Fetch episode metadata (title, number, download count) from the API.
        meta_url = f"https://api.darknetdiaries.com/{i}.json"
        meta_resp = requests.get(meta_url, timeout=30)
        parsed_json = meta_resp.json()
        title = parsed_json["episode_name"]
        number = parsed_json["episode_number"]
        downloads = parsed_json["total_downloads"]

        # Explicit UTF-8 so output doesn't depend on the platform locale.
        with open(f"{folder_path}/episode_{number}.txt", "w", encoding="utf-8") as f:
            f.write(f"{title}\n{downloads}\n{transcript}")
        print(f"{number} {title}")
    except Exception as e:
        # Best-effort per episode: report the cause instead of swallowing it,
        # then continue with the next episode.
        print(f"Failed scraping episode {i}: {e}")