Commit 01c612b
Parent(s): 572dd5c
Create app.py
app.py ADDED
import gradio as gr
from datasets import load_from_disk
import numpy as np


gender_labels = ['man', 'non-binary', 'woman', 'no_gender_specified']

ethnicity_labels = ['African-American', 'American_Indian', 'Black', 'Caucasian', 'East_Asian',
                    'First_Nations', 'Hispanic', 'Indigenous_American', 'Latino', 'Latinx',
                    'Multiracial', 'Native_American', 'Pacific_Islander', 'South_Asian',
                    'Southeast_Asian', 'White', 'no_ethnicity_specified']
models = ['DallE', 'SD_14', 'SD_2']
nos = [1, 2, 3, 4, 5, 6, 7, 8, 9, 10]
# Visual vocabulary sizes shown in the UI. Placeholder value, assumed from
# the function name below: the file uses `indexes` without defining it.
indexes = [64]

# The dataset is pre-sorted by colorfulness and loaded from local disk;
# load_from_disk matches the import above (the file called the unimported
# load_dataset here).
ds = load_from_disk("color-sorted")

def get_nearest_64(gender, ethnicity, model, no, index):
    # Locate the row that matches the selected identity attributes.
    df = ds.remove_columns(["image", "image_path"]).to_pandas()
    ix = df.loc[(df['ethnicity'] == ethnicity) & (df['gender'] == gender) & (df['no'] == no) & (df['model'] == model)].index[0]
    # `index` (the vocabulary size) is accepted from the UI but unused here:
    # in the colorfulness ordering the query image sits at row ix itself.
    image = ds.select([ix])["image"][0]
    # Neighbors are the rows adjacent in the colorfulness ordering.
    # Dataset.select expects an iterable of indices, hence the range.
    neighbors = ds.select(range(max(ix - 10, 0), min(ix + 10, len(ds) - 1)))
    neighbor_images = neighbors["image"]
    # Rebuild a caption from each file name: drop the directory part, then
    # the first four and last three underscore-separated tokens.
    neighbor_captions = [caption.split("/")[-1] for caption in neighbors["image_path"]]
    neighbor_captions = [' '.join(caption.split("_")[4:-3]) for caption in neighbor_captions]
    neighbor_models = neighbors["model"]
    neighbor_captions = [f"{a} {b}" for a, b in zip(neighbor_captions, neighbor_models)]
    return image, list(zip(neighbor_images, neighbor_captions))

with gr.Blocks() as demo:
    gr.Markdown("# Colorfulness Nearest Neighbors Explorer")
    gr.Markdown("### TF-IDF index of the _identities_ dataset of images generated by 3 models using colorfulness")
    gr.Markdown("#### Choose one of the generated identity images to see its nearest neighbors according to colorfulness")
    with gr.Row():
        with gr.Column():
            model = gr.Radio(models, label="Model")
            index = gr.Radio(indexes, label="Visual vocabulary size")
            gender = gr.Radio(gender_labels, label="Gender label")
        with gr.Column():
            ethnicity = gr.Radio(ethnicity_labels, label="Ethnicity label")
            no = gr.Radio(nos, label="Image number")
    button = gr.Button(value="Get nearest neighbors")
    with gr.Row():
        image = gr.Image()
        gallery = gr.Gallery().style(grid=4)  # Gradio 3.x gallery styling
    button.click(get_nearest_64, inputs=[gender, ethnicity, model, no, index], outputs=[image, gallery])

demo.launch()
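For context, load_from_disk("color-sorted") expects a dataset that was already scored, sorted by colorfulness, and saved with Dataset.save_to_disk. Below is a minimal sketch of how such a dataset could be produced, assuming the Hasler-Suesstrunk colorfulness metric and a hypothetical source dataset id (this commit specifies neither):

# Sketch only: build a colorfulness-sorted dataset like "color-sorted".
import numpy as np
from datasets import load_dataset

def colorfulness(image):
    # Hasler & Suesstrunk (2003): opponent-channel statistics of an RGB image.
    arr = np.asarray(image.convert("RGB"), dtype=np.float64)
    rg = arr[..., 0] - arr[..., 1]
    yb = 0.5 * (arr[..., 0] + arr[..., 1]) - arr[..., 2]
    return np.hypot(rg.std(), yb.std()) + 0.3 * np.hypot(rg.mean(), yb.mean())

source = load_dataset("identities")["train"]  # hypothetical dataset id
scored = source.map(lambda row: {"colorfulness": colorfulness(row["image"])})
scored.sort("colorfulness").save_to_disk("color-sorted")

Sorting once up front is what lets get_nearest_64 treat adjacent rows as nearest neighbors instead of searching at query time.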