from googlesearch import search


import os
import requests
import shutil

# Folder where the downloaded documents and the search log are stored.
date_time = ("documente")
os.makedirs(date_time, exist_ok=True)
print(f"Cautator si analizator de {date_time}")

query = input("introduceti intrebarea sau termenii de cautare: ")
num = input("de la 1-100 cat de adanc vrei sa sap pentru aceasta informatie: ")


log_file_path = f'{date_time}/logs.txt'
# `pause=10` throttles the scraper so Google is less likely to block it.
# `search` yields URLs lazily (it is a generator, not a list).
results = search(query, num=int(num), start=0, stop=int(num), pause=10)
results_list = []
# `with` guarantees the log is flushed and closed even if the search
# iteration raises (e.g. on an HTTP error mid-stream).
with open(log_file_path, 'w') as log_file:
    for url in results:
        log_file.write(f"{url}\n")
        results_list.append(url)

# Mirror the log next to the current working directory for convenience.
cwd = os.path.curdir
cwd_log_file = os.path.join(cwd, 'logs.txt')
print(cwd_log_file)
# BUG FIX: the original called shutil.rmtree() on this *file* path; rmtree
# only removes directory trees, so the call always failed and was silenced
# by ignore_errors=True. copy2 overwrites an existing destination file
# anyway, so the removal step is simply dropped.
shutil.copy2(log_file_path, cwd_log_file)

def download_url(url_to_download, index, folder):
    """Download *url_to_download* into *folder* as '<index>_<basename>'.

    The index prefix keeps files from different pages with the same
    basename from overwriting each other.

    Parameters
    ----------
    url_to_download : str
        Fully qualified URL of the document to fetch.
    index : int
        Running counter used to make the local filename unique.
    folder : str
        Existing directory the file is written into.

    Raises
    ------
    requests.RequestException
        On connection errors or non-2xx HTTP responses.
    """
    print(url_to_download)
    # Fall back to a generic name when the URL ends with '/' and the
    # basename would otherwise be empty (original produced names like '3_').
    basename = url_to_download.split('/')[-1] or 'index.html'
    local_path = os.path.join(folder, f'{index}_{basename}')
    # stream=True so iter_content actually streams the body in chunks
    # instead of buffering the whole response in memory first; the `with`
    # blocks release the connection and the file even on error.
    with requests.get(url_to_download, stream=True) as r:
        # Surface HTTP errors (404/403/...) instead of saving error pages
        # as documents; the caller's try/except treats this as a skip.
        r.raise_for_status()
        with open(local_path, 'wb') as f:
            for chunk in r.iter_content(chunk_size=512 * 1024):
                if chunk:  # filter out keep-alive new chunks
                    f.write(chunk)


print("am terminat de cautat, acum incep descarcarea documentelor")
# Best-effort batch download: one bad URL must not abort the whole run,
# but failures are reported instead of being swallowed silently
# (the original `except Exception: pass` hid every error).
for ix, url in enumerate(results_list):
    try:
        download_url(url, ix, date_time)
    except Exception as exc:
        print(f"descarcare esuata pentru {url}: {exc}")