from googlesearch import search
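# NOTE: assumes the "google" PyPI package (module name: googlesearch), whose
# search() accepts num/start/stop/pause keyword arguments; other googlesearch
# forks expose a different signature.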

from datetime import datetime
import os
import requests

# Create a timestamped folder that will hold the log file and the downloads.
now = datetime.now()
date_time = now.strftime("%Y%m%d-%H%M%S")
os.makedirs(date_time, exist_ok=True)
print(f"Saving results into {date_time}")

query = input("search: ")
num = int(input("how many links should be extracted: "))

# Run the search and log every result URL to <date_time>/logs.txt.
results_list = []
with open(os.path.join(date_time, 'logs.txt'), 'w') as log_file:
    for url in search(query, num=num, start=0, stop=num, pause=5):
        log_file.write(f"{url}\n")
        results_list.append(url)


def download_url(url_to_download, index, folder):
    """Download a single URL into `folder`, prefixing the file name with `index`."""
    print(url_to_download)
    # Fall back to a generic name when the URL ends with a slash.
    local_filename = f"{index}_" + (url_to_download.split('/')[-1] or 'index.html')
    local_path = os.path.join(folder, local_filename)
    with requests.get(url_to_download, stream=True, timeout=30) as r:
        r.raise_for_status()
        with open(local_path, 'wb') as f:
            for chunk in r.iter_content(chunk_size=512 * 1024):
                if chunk:  # filter out keep-alive chunks
                    f.write(chunk)
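
# Example (hypothetical URL): download_url("https://example.com/file.pdf", 0, date_time)
# would save the response body as "<date_time>/0_file.pdf".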


print("am terminat de cautat, acum incep download")
ix = 0
for url in results_list:
    try:
        download_url(url, ix, date_time)
    except Exception as ignored:
        pass
    ix += 1
