import requests
from bs4 import BeautifulSoup
import pandas as pd
from textblob import TextBlob
import networkx as nx
import matplotlib.pyplot as plt
from matplotlib.widgets import Cursor
import tkinter as tk
from tkinter import scrolledtext
import threading
import numpy as np
from matplotlib.patches import Circle
import matplotlib.animation as animation
from newspaper import Article

class DraggableNode:
    """Makes one graph node draggable on a matplotlib canvas.

    One instance is created per node; every instance registers its own
    press/motion/release callbacks on the shared figure canvas and hit-tests
    the cursor against its node's current position in the shared layout.
    """

    def __init__(self, node, pos, G, ax, fig, pos_dict, nodes, edges):
        self.node = node            # graph node key (keyword string or URL)
        self.pos = pos              # initial (x, y); lookups actually use pos_dict
        self.G = G                  # the networkx graph being displayed
        self.ax = ax                # axes the graph is drawn on
        self.fig = fig              # owning figure (for canvas event hookup)
        self.pos_dict = pos_dict    # shared {node: (x, y)} layout, mutated on drag
        self.nodes = nodes          # draw kwargs: 'color', 'size', 'font_size', 'font_weight'
        self.edges = edges          # draw kwargs: 'width'
        self.press = None           # (x0, y0, xpress, ypress) while a drag is active
        self.background = None      # canvas snapshot for blitting (see NOTE in on_press)
        
        self.connect()

    def connect(self):
        """Register the three mouse-event callbacks on the figure canvas."""
        self.cidpress = self.fig.canvas.mpl_connect('button_press_event', self.on_press)
        self.cidrelease = self.fig.canvas.mpl_connect('button_release_event', self.on_release)
        self.cidmotion = self.fig.canvas.mpl_connect('motion_notify_event', self.on_motion)

    def on_press(self, event):
        """Start a drag if the click lands within 0.1 data units of this node."""
        if event.inaxes != self.ax:
            return
        x, y = self.pos_dict[self.node]
        if abs(event.xdata - x) < 0.1 and abs(event.ydata - y) < 0.1:
            # Remember both the node's start position and the cursor's.
            self.press = x, y, event.xdata, event.ydata
            self.ax.set_animated(True)
            self.fig.canvas.draw()
            # NOTE(review): snapshot is captured here but never restored in
            # on_motion, so the blitting optimization is incomplete — confirm
            # whether restore_region was intended.
            self.background = self.fig.canvas.copy_from_bbox(self.ax.bbox)

    def on_motion(self, event):
        """While dragging, move the node by the cursor delta and redraw."""
        if self.press is None or event.inaxes != self.ax:
            return
        x0, y0, xpress, ypress = self.press
        dx = event.xdata - xpress
        dy = event.ydata - ypress
        self.pos_dict[self.node] = (x0 + dx, y0 + dy)
        self.update_graph()

    def on_release(self, event):
        """End the drag and do a final full redraw."""
        if self.press is None:
            return
        self.press = None
        self.ax.set_animated(False)
        self.fig.canvas.draw()

    def update_graph(self):
        """Redraw the whole graph from the shared position map.

        NOTE(review): draws with with_labels=True (raw node names) instead of
        the custom labels dict used for the initial draw, so URLs replace
        article titles once a node has been dragged — confirm intended.
        """
        self.ax.clear()
        nx.draw(self.G, pos=self.pos_dict, 
               ax=self.ax,
               node_color=self.nodes['color'],
               node_size=self.nodes['size'],
               with_labels=True,
               font_size=self.nodes['font_size'],
               font_weight=self.nodes['font_weight'],
               width=self.edges['width'])
        self.fig.canvas.draw_idle()

class ArticleWindow:
    """A standalone Tk window that displays a fetched article's text.

    Shows the source URL at the top, the (read-only) article body in a
    scrollable text widget, and a close button, centered on screen.
    """

    def __init__(self, title, content, url):
        self.root = tk.Tk()
        self.root.title(title)
        self.root.geometry("800x600")

        # Source line at the top; wraps so long URLs stay visible.
        tk.Label(self.root, text=f"Sursa: {url}", wraplength=780).pack(pady=5)

        # Read-only scrollable article body.
        self.text_area = scrolledtext.ScrolledText(
            self.root, wrap=tk.WORD, width=80, height=30)
        self.text_area.pack(padx=10, pady=10, expand=True, fill='both')
        self.text_area.insert(tk.INSERT, content)
        self.text_area.configure(state='disabled')

        tk.Button(self.root, text="Închide", command=self.root.destroy).pack(pady=5)

        self.center_window()

    def center_window(self):
        """Center the window on the screen using its realized size."""
        self.root.update_idletasks()
        width = self.root.winfo_width()
        height = self.root.winfo_height()
        # Same rounding as before: each dimension halved independently.
        offset_x = (self.root.winfo_screenwidth() // 2) - (width // 2)
        offset_y = (self.root.winfo_screenheight() // 2) - (height // 2)
        self.root.geometry(f'{width}x{height}+{offset_x}+{offset_y}')

    def show(self):
        """Enter the Tk event loop; blocks until the window is closed."""
        self.root.mainloop()

class InfoGatherer:
    """Searches Bing for keywords restricted to .ro sites, scores snippet
    sentiment with TextBlob, saves results to CSV, and renders an
    interactive keyword/article relationship graph.
    """

    def __init__(self, api_key):
        # Bing Web Search v7 credentials and endpoint.
        self.api_key = api_key
        self.base_url = "https://api.bing.microsoft.com/v7.0/search"
        self.headers = {
            'Ocp-Apim-Subscription-Key': self.api_key
        }
        self.data = []             # collected result rows (list of dicts)
        self.keywords = []         # keywords to search for
        self.draggable_nodes = []  # keeps DraggableNode refs alive while shown

    def set_keywords(self, keywords):
        """Set the keywords to search for and discard previous results."""
        self.keywords = keywords
        self.data = []  # Reset data when new keywords are set

    def generate_query(self, keyword):
        """Generate a search query restricting results to Romanian sites."""
        return f"{keyword} site:.ro"

    def fetch_results(self, keyword):
        """Query the Bing API for one keyword.

        Retries up to 3 times on timeout; returns the parsed JSON response,
        or None after repeated failures or a non-timeout request error.
        """
        max_retries = 3
        for attempt in range(max_retries):
            try:
                query = self.generate_query(keyword)
                params = {'q': query}
                response = requests.get(self.base_url, headers=self.headers, params=params, timeout=30)
                print(f"Căutare pentru cuvântul cheie: {keyword}")
                print(f"Cod HTTP: {response.status_code}")
                response.raise_for_status()
                return response.json()
            except requests.exceptions.Timeout:
                print(f"Timeout la încercarea {attempt + 1} din {max_retries}. Se încearcă din nou...")
            except requests.exceptions.RequestException as e:
                # Non-timeout errors (HTTP 4xx/5xx, connection) are not retried.
                print(f"Eroare la preluarea rezultatelor: {e}")
                break
        print("Nu s-au putut prelua rezultatele după mai multe încercări.")
        return None

    def fetch_full_article(self, url):
        """Fetch and parse the full article content from the URL.

        Tries newspaper3k first; if it extracts no body text, falls back to
        a raw BeautifulSoup text scrape with boilerplate tags stripped.
        Returns a formatted string, or an error message on failure.
        """
        try:
            article = Article(url, language='ro')
            article.download()
            article.parse()
            content = []
            if article.title:
                content.append(f"Titlu: {article.title}\n")
            if article.authors:
                content.append(f"Autori: {', '.join(article.authors)}\n")
            if article.publish_date:
                content.append(f"Data publicării: {article.publish_date.strftime('%d-%m-%Y %H:%M')}\n")
            if article.text:
                content.append("\nConținut articol:\n")
                content.append(article.text)
            if not article.text:
                # Fallback: raw HTML scrape when newspaper3k extracts nothing.
                response = requests.get(url, timeout=30)
                response.raise_for_status()
                soup = BeautifulSoup(response.text, 'html.parser')
                for element in soup(['script', 'style', 'nav', 'header', 'footer', 'iframe', 'ads']):
                    element.decompose()
                text = soup.get_text(separator='\n')
                lines = (line.strip() for line in text.splitlines())
                text = '\n'.join(line for line in lines if line)
                content.append(text)
            return '\n'.join(content)
        except Exception as e:
            # Best-effort: report the failure instead of crashing the UI.
            return f"Nu s-a putut prelua conținutul articolului. Eroare: {str(e)}"

    def parse_results(self, json_content, keyword):
        """Extract title/link/snippet from a Bing response and score the
        snippet's sentiment, appending one row per result to self.data."""
        if json_content and 'webPages' in json_content:
            search_results = json_content['webPages']['value']
            for result in search_results:
                title = result.get('name', 'Fără titlu')
                link = result.get('url', 'Fără link')
                snippet = result.get('snippet', 'Fără descriere')
                sentiment = TextBlob(snippet).sentiment
                self.data.append({
                    'Keyword': keyword,
                    'Title': title,
                    'Link': link,
                    'Snippet': snippet,
                    'Polarity': sentiment.polarity,
                    'Subjectivity': sentiment.subjectivity
                })
        else:
            print(f"Nu există conținut de procesat pentru cuvântul cheie: {keyword}")

    def save_to_csv(self, filename):
        """Write collected rows to ``filename`` as CSV (no-op when empty)."""
        if self.data:
            df = pd.DataFrame(self.data)
            df.to_csv(filename, index=False)
            # Bug fix: previously printed a literal placeholder instead of
            # the actual file name.
            print(f"Date salvate în {filename}")
        else:
            print("Nu există date de salvat.")

    def show_article_window(self, title, url):
        """Fetch the article at ``url`` and show it in a new Tk window.

        The window runs on a separate thread so the matplotlib figure
        stays responsive.
        """
        print(f"Se preia conținutul articolului de la: {url}")
        content = self.fetch_full_article(url)

        def show_window():
            window = ArticleWindow(title, content, url)
            window.show()

        thread = threading.Thread(target=show_window)
        thread.start()

    def generate_relationship_graph(self, figsize=(12, 10), node_color='lightblue', font_size=8, font_weight='bold'):
        """Render an interactive keyword/article graph.

        Keyword nodes are red; article nodes use ``node_color`` (previously
        this parameter was accepted but ignored). Edge width encodes the
        absolute snippet polarity. Articles matched by more than one keyword
        blink. Hovering shows details, clicking a content node opens the full
        article, and all nodes are draggable.
        """
        if not self.data:
            print("Nu există date pentru generarea graficului.")
            return

        # --- Build the bipartite keyword/article graph. ---
        G = nx.Graph()
        for keyword in self.keywords:
            G.add_node(keyword, node_type='keyword')
        for entry in self.data:
            url = entry['Link']
            title = entry['Title']
            keyword = entry['Keyword']
            polarity = entry['Polarity']
            # A neutral (0.0) sentiment still needs a visible edge width.
            weight = abs(polarity) if polarity != 0 else 0.1
            if not G.has_node(url):
                G.add_node(url, node_type='content', title=title)
            G.add_edge(keyword, url, weight=weight, polarity=polarity)

        # --- Layout and per-node draw attributes. ---
        pos = nx.spring_layout(G, k=1.5)
        labels = {node: node if G.nodes[node]['node_type'] == 'keyword' else G.nodes[node]['title'] for node in G.nodes()}
        node_list = list(G.nodes())
        node_colors = ['red' if G.nodes[node]['node_type'] == 'keyword' else node_color for node in node_list]
        # Content nodes reached by more than one keyword get a blink effect.
        special_nodes = [node for node in node_list if G.nodes[node]['node_type'] == 'content' and G.degree(node) > 1]
        node_sizes = [5000 if G.nodes[node]['node_type'] == 'keyword' else 3000 for node in node_list]
        edge_widths = [G.edges[edge]['weight'] for edge in G.edges()]

        fig, ax = plt.subplots(figsize=figsize)
        nx.draw(G, pos, ax=ax, labels=labels, node_color=node_colors, node_size=node_sizes, font_size=font_size, font_weight=font_weight, width=edge_widths)

        def animate(frame):
            # Toggle the blink color of multi-keyword articles every frame.
            for node in special_nodes:
                index = node_list.index(node)
                if frame % 2 == 0:
                    node_colors[index] = 'red'
                else:
                    node_colors[index] = 'yellow'
            ax.clear()
            nx.draw(G, pos, ax=ax, labels=labels, node_color=node_colors, node_size=node_sizes, font_size=font_size, font_weight=font_weight, width=edge_widths)

        # Keep a reference so the animation is not garbage-collected.
        anim = animation.FuncAnimation(fig, animate, interval=500, blit=False)

        # Track the live hover annotation so it is replaced, not stacked.
        hover_state = {'annotation': None}

        def on_hover(event):
            if event.inaxes != ax:
                return
            for node, (x, y) in pos.items():
                if abs(event.xdata - x) < 0.05 and abs(event.ydata - y) < 0.05:
                    if G.nodes[node]['node_type'] == 'keyword':
                        annotation_text = f"Cuvânt cheie: {node}"
                    else:
                        title = G.nodes[node]['title']
                        keywords = list(G.neighbors(node))
                        polarities = [G.edges[(keyword, node)]['polarity'] for keyword in keywords]
                        annotation_text = f"Articol: {title}\nCuvinte cheie: {', '.join(keywords)}\nPolarități: {', '.join([f'{p:.2f}' for p in polarities])}\nSursă: {node}"
                    # Bug fix: drop the previous annotation instead of
                    # letting them accumulate on the axes.
                    if hover_state['annotation'] is not None:
                        try:
                            hover_state['annotation'].remove()
                        except ValueError:
                            pass  # already wiped by ax.clear() in animate()
                    hover_state['annotation'] = ax.annotate(annotation_text, (x, y), textcoords="offset points", xytext=(10, 10), ha='center', fontsize=10, bbox=dict(facecolor='yellow', alpha=0.8))
                    fig.canvas.draw_idle()
                    return

        def on_click(event):
            if event.inaxes != ax:
                return
            for node, (x, y) in pos.items():
                if abs(event.xdata - x) < 0.05 and abs(event.ydata - y) < 0.05:
                    if G.nodes[node]['node_type'] == 'content':
                        self.show_article_window(G.nodes[node]['title'], node)

        fig.canvas.mpl_connect('motion_notify_event', on_hover)
        fig.canvas.mpl_connect('button_press_event', on_click)

        # One draggable handler per node; stored on self so they stay alive.
        self.draggable_nodes = []
        for node in G.nodes():
            draggable = DraggableNode(node, pos[node], G, ax, fig, pos, {'color': node_colors, 'size': node_sizes, 'font_size': font_size, 'font_weight': font_weight}, {'width': edge_widths})
            self.draggable_nodes.append(draggable)

        plt.figtext(0.02, 0.02, 
                   "Click și trage nodurile pentru a le repoziționa\nClick pe nodurile albastre pentru a vedea articolele", 
                   fontsize=8, 
                   bbox=dict(facecolor='white', alpha=0.8))

        plt.show()

    def run(self, filename="keyword_analysis.csv"):
        """Full pipeline: search each keyword, save CSV, show the graph."""
        for keyword in self.keywords:
            json_content = self.fetch_results(keyword)
            self.parse_results(json_content, keyword)
        self.save_to_csv(filename)
        self.generate_relationship_graph()

if __name__ == "__main__":
    import os
    # SECURITY: an API key is hardcoded and checked into source. Prefer the
    # BING_API_KEY environment variable; the inline value is kept only as a
    # backward-compatible fallback and should be rotated.
    api_key = os.environ.get("BING_API_KEY", "ef215b7e7045440cbe5b841350c0afe9")
    print("Introduceți cuvintele cheie pentru analiză (separate prin virgulă):")
    user_input = input().strip()
    # Drop empty fragments (blank input, trailing/double commas) so we never
    # search for the empty string.
    keywords = [k.strip() for k in user_input.split(',') if k.strip()]
    if not keywords:
        print("Nu au fost introduse cuvinte cheie.")
    else:
        info_gatherer = InfoGatherer(api_key)
        info_gatherer.set_keywords(keywords)
        info_gatherer.run()
