Create your own Wikipedia
Create your own Wikipedia with Python
Code:
### Made by Manas
import requests
from bs4 import BeautifulSoup
import html
import tkinter as tk
from tkinter import Label, Entry, Button, Scrollbar, Text, Listbox, END, messagebox
def search_wikipedia(event=None):
    """Search Wikipedia for the text in the entry widget and list the titles.

    Bound both to the Search button (called with no arguments) and to the
    <Return> key (called with a Tkinter event), hence the optional *event*.

    Side effects: replaces the contents of ``result_listbox`` with the
    matching article titles, or shows an error dialog on failure.
    """
    search_query = entry.get().strip()
    if not search_query:
        # Nothing to search for; avoid firing an empty API request.
        return

    # Wikipedia API endpoint and search parameters.
    api_url = "https://en.wikipedia.org/w/api.php"
    search_params = {
        "action": "query",
        "format": "json",
        "list": "search",
        "srsearch": search_query,
    }

    try:
        # Timeout keeps the GUI from hanging indefinitely on a stalled request.
        response = requests.get(api_url, params=search_params, timeout=10)
    except requests.RequestException:
        messagebox.showerror("Error", "Failed to retrieve search results from Wikipedia.")
        return

    if response.status_code == 200:
        data = response.json()
        search_results = data.get("query", {}).get("search", [])
        # Replace the previous result list with the fresh titles.
        result_listbox.delete(0, END)
        for result in search_results:
            result_listbox.insert(END, result.get("title"))
    else:
        messagebox.showerror("Error", "Failed to retrieve search results from Wikipedia.")
def display_selected_article(event):
    """Handle a listbox selection: copy the chosen title into the entry
    field and fetch/display that article's introduction.

    Guards against the <<ListboxSelect>> event firing with an empty
    selection (e.g. when the listbox loses focus or is cleared), which
    would otherwise make ``curselection()``'s empty tuple raise a
    TclError inside ``Listbox.get``.
    """
    selection = result_listbox.curselection()
    if not selection:
        return
    selected_title = result_listbox.get(selection[0])

    # Mirror the chosen title into the entry field, then fetch the article.
    entry.delete(0, END)
    entry.insert(0, selected_title)
    scrape_wikipedia()
def scrape_wikipedia():
    """Fetch the introduction of the article named in the entry widget and
    display it, as plain text, in the read-only text widget.

    Uses the Wikipedia ``prop=extracts`` API with ``exintro`` so only the
    lead section is returned, then strips HTML with BeautifulSoup.
    Shows an error dialog on network failure, a non-200 response, or a
    missing/empty article.
    """
    article_title = entry.get()

    # Wikipedia API endpoint and extract parameters (plain constant; the
    # original f-string had no placeholders).
    api_url = "https://en.wikipedia.org/w/api.php"
    params = {
        "action": "query",
        "format": "json",
        "prop": "extracts",
        "exintro": True,
        "titles": article_title,
    }

    try:
        # Timeout keeps the GUI from hanging indefinitely on a stalled request.
        response = requests.get(api_url, params=params, timeout=10)
    except requests.RequestException:
        messagebox.showerror("Error", "Failed to retrieve data from Wikipedia.")
        return

    if response.status_code != 200:
        messagebox.showerror("Error", "Failed to retrieve data from Wikipedia.")
        return

    data = response.json()
    pages = data["query"]["pages"]
    # The response maps a single page id to the page object.
    page_id = next(iter(pages))
    # A missing page has id "-1" and no "extract" key — guard the lookup
    # instead of letting it raise KeyError.
    introduction = pages[page_id].get("extract")
    if not introduction:
        messagebox.showerror("Error", "Failed to retrieve data from Wikipedia.")
        return

    # Strip HTML tags, then decode any remaining HTML entities.
    soup = BeautifulSoup(introduction, "html.parser")
    cleaned_text = html.unescape(soup.get_text())

    # The text widget is kept disabled so the user cannot edit it;
    # temporarily enable it to rewrite its contents.
    result_text.config(state='normal')
    result_text.delete('1.0', tk.END)
    result_text.insert(tk.END, cleaned_text)
    result_text.config(state='disabled')
# --- GUI construction -------------------------------------------------
root = tk.Tk()
root.title("Wikipedia Scraper")

# All widgets live in a single padded frame.
frame = tk.Frame(root)
frame.pack(padx=10, pady=10)

# Header labels.
label = Label(frame, text="Subscribe to Py3")
label.pack()
label = Label(frame, text="MasterPedia")
label.pack()

# Query input and its search trigger.
entry = Entry(frame, width=40)
entry.pack()
search_button = Button(frame, text="Search", command=search_wikipedia)
search_button.pack()

# Search results; selecting an entry loads that article.
result_listbox = Listbox(frame, width=80, height=10)
result_listbox.pack()
result_listbox.bind("<<ListboxSelect>>", display_selected_article)

# Read-only article display with a vertical scrollbar.
result_text = Text(frame, width=80, height=27.0)
result_text.pack()
result_text.config(state='disabled')
scrollbar = Scrollbar(frame, command=result_text.yview)
scrollbar.pack(side='right', fill='y')
result_text.config(yscrollcommand=scrollbar.set)

# Pressing Return in the entry field searches, same as the button.
entry.bind("<Return>", search_wikipedia)

root.mainloop()
.jpeg)
Comments
Post a Comment