from datasets import Dataset, Features, Value, Sequence
import json
import os

# --- 1. Configuration ---
JSON_FILE_PATH = "Indian_CIVICS_Dataset.json"
OUTPUT_DIR = "indian_civics_dataset_hf"

# --- 2. Define the Dataset Schema (Features) ---
# Defines the column names and their strict data types for the Hugging Face Dataset.
DATASET_FEATURES = Features(
    {
        "ID": Value(dtype="string"),
        "Topic": Value(dtype="string"),
        "Sub-Topic": Value(dtype="string"),
        "Statement": Value(dtype="string"),  # Original Indian language text (Hindi/Telugu)
        "Statement - Translation": Value(dtype="string"),  # English translation
        "Data Source": Value(dtype="string"),
        "Data Producer Organization": Value(dtype="string"),
        "Organization Type": Value(dtype="string"),
        "Language": Value(dtype="string"),
        "State/Region": Value(dtype="string"),
        "Link": Value(dtype="string"),  # Matches the 'Link' key in the JSON
        # Sequence is used for a list of strings (e.g., ["Tag 1", "Tag 2"])
        "Consensus Value Annotation": Sequence(feature=Value(dtype="string")),
    }
)
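
# For reference, each record in the JSON file is expected to carry exactly the
# keys declared in DATASET_FEATURES above. A minimal illustrative record
# (hypothetical placeholder values, not taken from the actual dataset):
#
# {
#     "ID": "<id>",
#     "Topic": "<topic>",
#     "Sub-Topic": "<sub-topic>",
#     "Statement": "<original Hindi/Telugu text>",
#     "Statement - Translation": "<English translation>",
#     "Data Source": "<source>",
#     "Data Producer Organization": "<organization>",
#     "Organization Type": "<type>",
#     "Language": "Hindi",
#     "State/Region": "<state or region>",
#     "Link": "<URL>",
#     "Consensus Value Annotation": ["<tag 1>", "<tag 2>"]
# }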

# --- 3. Data Utility Functions ---

def load_data_from_json(file_path):
    """Loads a list of dictionaries from a JSON file."""
    if not os.path.exists(file_path):
        raise FileNotFoundError(f"Error: JSON file not found at {file_path}")
    # Use 'utf-8' encoding for correct handling of Hindi and Telugu characters
    with open(file_path, 'r', encoding='utf-8') as f:
        data_list = json.load(f)
    return data_list


def convert_to_hf_format(data_list):
    """Converts a list of dictionaries into a dictionary of lists (HF format)."""
    if not data_list:
        return {}
    # Initialize the dictionary of lists based on the first item's keys
    keys = data_list[0].keys()
    hf_data = {key: [] for key in keys}
    # Populate the lists
    for item in data_list:
        for key in keys:
            hf_data[key].append(item.get(key))
    return hf_data
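
# Note: the list-of-dicts -> dict-of-lists conversion above can also be done
# directly by the datasets library. A roughly equivalent sketch (same result,
# assuming every record's keys match DATASET_FEATURES):
#
#     dataset = Dataset.from_list(statement_list, features=DATASET_FEATURES)
#
# The explicit helper is kept here so the transformation is visible and easy to adjust.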

# --- 4. Main Execution ---

if __name__ == "__main__":
    try:
        print(f"Loading data from: {JSON_FILE_PATH}...")

        # Load the data (List of Dicts)
        statement_list = load_data_from_json(JSON_FILE_PATH)
        print(f"✅ Loaded {len(statement_list)} entries.")

        # Transform the data structure (Dict of Lists)
        hf_sample_data = convert_to_hf_format(statement_list)

        # --- Create and Save the Dataset ---
        dataset = Dataset.from_dict(
            hf_sample_data,
            features=DATASET_FEATURES  # Apply the defined schema
        )
        os.makedirs(OUTPUT_DIR, exist_ok=True)
        dataset.save_to_disk(OUTPUT_DIR)

        print("-" * 50)
        print("✅ Dataset successfully created.")
        print(f"Total examples: {len(dataset)}")
        print(f"Dataset saved locally to: ./{OUTPUT_DIR}")
        print("-" * 50)
        print("--- Sample Example (Index 0) ---")
        print(dataset[0])
        print("-" * 50)
    except FileNotFoundError as e:
        print(f"\nFATAL ERROR: {e}")
        print(f"Please make sure the file '{JSON_FILE_PATH}' is in the same directory as '{os.path.basename(__file__)}'.")
    except Exception as e:
        print(f"\nAn unexpected error occurred during dataset creation: {e}")