from datasets import Dataset, concatenate_datasets
from huggingface_hub import login
import os
from datasets import load_dataset
from datasets import DownloadConfig


def update_db_hub(texts, topics, dates):
    """Append rows to the ``Danielrahmai1991/row_data`` dataset on the Hugging Face Hub.

    Builds a small dataset from the given columns, concatenates it onto the
    existing remote ``train`` split (if the dataset can be loaded), and pushes
    the result back to the hub as a private dataset.

    Args:
        texts: Sequence of text entries (one per new row).
        topics: Sequence of topic labels, same length as ``texts``.
        dates: Sequence of date values, same length as ``texts``.

    Raises:
        ValueError: If the ``hf_key`` environment variable is not set —
            without it ``login(token=None)`` would fail opaquely or try to
            prompt interactively.
    """
    api_token = os.getenv("hf_key")
    if not api_token:
        raise ValueError("Environment variable 'hf_key' is not set; cannot authenticate with the Hugging Face Hub.")
    login(token=api_token)
    dataset_name = "Danielrahmai1991/row_data"

    new_rows = {
        "text": texts,
        "topic": topics,
        "date": dates,
    }
    new_dataset = Dataset.from_dict(new_rows)

    try:
        # `token=` is the supported way to authenticate load_dataset;
        # wrapping the token in DownloadConfig is a legacy workaround.
        dataset = load_dataset(dataset_name, token=api_token)
        updated_dataset = concatenate_datasets([dataset["train"], new_dataset])
    except Exception as e:
        # Best-effort fallback: on the first run the remote dataset does not
        # exist yet, so push only the new rows instead of failing.
        updated_dataset = new_dataset
        print(f"Failed to load dataset: {e}")

    print("updated_dataset", updated_dataset)
    try:
        updated_dataset.push_to_hub(dataset_name, private=True)  # Set private=False if it's not private
        print(f"Updated dataset pushed to the Hugging Face Hub: {dataset_name}")
    except Exception as e:
        print(f"Failed to push dataset: {e}")