import concurrent.futures
import time

import pandas as pd
import requests
from bs4 import BeautifulSoup
from tqdm import tqdm

# Input CSV with one article URL per row. Scraped articles are written to an
# Excel file at the end; intermediate checkpoints go to a separate CSV so the
# partial dumps never clobber the final .xlsx output.
input_csv = "acharya_prashant_articles.csv"
output_xlsx = "Final_articles_content_with_hindi.xlsx"
checkpoint_csv = "articles_checkpoint.csv"

urls_df = pd.read_csv(input_csv, encoding='utf-8')
urls = urls_df['Article URL'].tolist()

def fetch_article_data(url):
    """Fetch a single article page and return its URL, title, and body text."""
    try:
        response = requests.get(url, timeout=100)
        response.encoding = 'utf-8'
        if response.status_code == 200:
            soup = BeautifulSoup(response.text, 'html.parser')
            title = soup.find('title').text.strip()

            # The article body sits inside a Tailwind-styled container div;
            # each paragraph block is a nested "flex flex-col text-justify" div.
            content_container = soup.find("div", class_="flex flex-col space-y-4 laptop:space-y-4.5")
            content = ""
            if content_container:
                nested_divs = content_container.find_all("div", class_="flex flex-col text-justify")
                for div in nested_divs:
                    content += div.text.strip() + "\n"

            return {"URL": url, "Title": title, "Content": content.strip()}
        else:
            return {"URL": url, "Title": None, "Content": None}
    except Exception as e:
        print(f"Error processing URL: {url}, Error: {e}")
        return {"URL": url, "Title": None, "Content": None}

def process_urls_in_parallel(urls, max_workers=10):
    """Scrape all URLs concurrently, checkpointing partial results every 100 articles."""
    results = []
    with concurrent.futures.ThreadPoolExecutor(max_workers=max_workers) as executor:
        for result in tqdm(executor.map(fetch_article_data, urls), total=len(urls)):
            results.append(result)

            # Periodically dump what we have so far, so a crash or network
            # outage doesn't lose the whole run.
            if len(results) % 100 == 0:
                temp_df = pd.DataFrame(results)
                temp_df.to_csv(checkpoint_csv, index=False, encoding='utf-8')

    return results

start_time = time.time()
articles_data = process_urls_in_parallel(urls, max_workers=20)
end_time = time.time()

# Writing .xlsx requires an Excel engine such as openpyxl.
articles_df = pd.DataFrame(articles_data)
articles_df.to_excel(output_xlsx, index=False)

print(f"Article details saved to '{output_xlsx}'")
print(f"Total time taken: {end_time - start_time:.2f} seconds")