04-Akansha committed on
Commit
d17dc9f
·
verified ·
1 Parent(s): 8988578

Upload Indian_CIVICS.py

Browse files
Files changed (1) hide show
  1. Indian_CIVICS.py +93 -0
Indian_CIVICS.py ADDED
@@ -0,0 +1,93 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+
2
+ from datasets import Dataset, Features, Value, Sequence
3
+ import json
4
+ import os
5
+
6
# --- 1. Configuration ---
# Path of the source JSON file (a top-level list of row dicts), resolved
# relative to the current working directory.
JSON_FILE_PATH = "Indian_CIVICS_Dataset.json"
# Directory the Hugging Face dataset is written to via save_to_disk().
OUTPUT_DIR = "indian_civics_dataset_hf"
9
+
10
# --- 2. Define the Dataset Schema (Features) ---
# Defines the column names and their strict data types for the Hugging Face Dataset.

# Every scalar column shares the same string dtype, so the schema is built
# programmatically; only the annotation column holds a list of strings.
_STRING_COLUMNS = (
    "ID",
    "Topic",
    "Sub-Topic",
    "Statement",                   # Original Indian Language text (Hindi/Telugu)
    "Statement - Translation",     # English translation
    "Data Source",
    "Data Producer Organization",
    "Organization Type",
    "Language",
    "State/Region",
    "Link",                        # Matches the 'Link' key in the JSON
)

DATASET_FEATURES = Features(
    {
        **{column: Value(dtype="string") for column in _STRING_COLUMNS},
        # Sequence is used for a list of strings (e.g., ["Tag 1", "Tag 2"])
        "Consensus Value Annotation": Sequence(feature=Value(dtype="string")),
    }
)
30
+
31
+ # --- 3. Data Utility Functions ---
32
+
33
def load_data_from_json(file_path):
    """Load and return the list of row dictionaries stored in *file_path*.

    Args:
        file_path: Path to a JSON file whose top-level value is a list of
            dictionaries (one dict per dataset row).

    Returns:
        The parsed list of dictionaries.

    Raises:
        FileNotFoundError: If *file_path* does not exist.
        ValueError: If the file parses but its top-level value is not a
            list (the rest of the pipeline assumes row-per-dict records).
    """
    if not os.path.exists(file_path):
        raise FileNotFoundError(f"Error: JSON file not found at {file_path}")
    # Use 'utf-8' encoding for correct handling of Hindi and Telugu characters
    with open(file_path, 'r', encoding='utf-8') as f:
        data_list = json.load(f)
    # Fail fast with a clear message rather than letting downstream code
    # crash on an unexpected structure (e.g. a top-level dict or scalar).
    if not isinstance(data_list, list):
        raise ValueError(
            f"Expected a JSON list at the top level of {file_path}, "
            f"got {type(data_list).__name__}"
        )
    return data_list
41
+
42
def convert_to_hf_format(data_list):
    """Convert a list of row dicts into a dict of column lists (HF format).

    Bug fix over the naive version: the column set is the union of keys
    across *all* rows (in first-seen order), not just the first row's keys,
    so fields that appear only in later rows are no longer silently dropped.
    Rows missing a column contribute ``None``, keeping every column the same
    length.

    Args:
        data_list: List of dictionaries, one per dataset row.

    Returns:
        Dict mapping each column name to the list of that column's values,
        or ``{}`` when *data_list* is empty.
    """
    if not data_list:
        return {}

    # Collect every key seen in any row, preserving first-seen order
    # (dicts preserve insertion order in Python 3.7+).
    keys = {}
    for item in data_list:
        for key in item:
            keys.setdefault(key, None)

    # item.get() yields None for rows that lack a given column.
    return {key: [item.get(key) for item in data_list] for key in keys}
57
+
58
+ # --- 4. Main Execution ---
59
+
60
def _build_and_save_dataset():
    """Load the JSON source, build the typed HF Dataset, and save it to disk."""
    print(f"Loading data from: {JSON_FILE_PATH}...")

    # Load the data (List of Dicts)
    records = load_data_from_json(JSON_FILE_PATH)
    print(f"✅ Loaded {len(records)} entries.")

    # Transform the data structure (Dict of Lists)
    columnar = convert_to_hf_format(records)

    # --- Create and Save the Dataset ---
    dataset = Dataset.from_dict(
        columnar,
        features=DATASET_FEATURES,  # Apply the defined schema
    )

    os.makedirs(OUTPUT_DIR, exist_ok=True)
    dataset.save_to_disk(OUTPUT_DIR)

    print("-" * 50)
    print(f"✅ Dataset successfully created.")
    print(f"Total examples: {len(dataset)}")
    print(f"Dataset saved locally to: ./{OUTPUT_DIR}")
    print("-" * 50)
    print("--- Sample Example (Index 0) ---")
    print(dataset[0])
    print("-" * 50)


# --- 4. Main Execution ---
if __name__ == "__main__":
    try:
        _build_and_save_dataset()
    except FileNotFoundError as e:
        print(f"\nFATAL ERROR: {e}")
        print(f"Please make sure the file '{JSON_FILE_PATH}' is in the same directory as '{os.path.basename(__file__)}'.")
    except Exception as e:
        print(f"\nAn unexpected error occurred during dataset creation: {e}")