labonny committed on
Commit
7ea1a95
1 Parent(s): 5b5ecce

Converting images to greyscale before predictions

Browse files
Files changed (3) hide show
  1. app.py +4 -2
  2. packages.txt +1 -0
  3. requirements.txt +1 -0
app.py CHANGED
@@ -1,15 +1,17 @@
# NOTE(review): pre-change version of app.py as rendered by the diff viewer.
# The bare numbers interleaved below are line-number artifacts of the web
# page, not code; "-" prefixes mark lines removed by this commit.
1
  import gradio as gr
2
  from fastai.vision.all import *
 
3
 
4
  learn = load_learner('model-v4.pkl')
5
  labels = learn.dls.vocab
6
  def predict(img):
7
# (removed by this commit) prediction previously ran on the raw input image,
# with no grayscale conversion:
- pred,pred_idx,probs = learn.predict(img)
 
8
  return {labels[i]: float(probs[i]) for i in range(len(labels))}
9
 
10
  title = "Facial Expression Classifier"
11
  description = "A facial expression classifier, trained using the <a href='https://www.kaggle.com/datasets/msambare/fer2013'>FER-2013 dataset</a>. This dataset consists of 28,709 examples of faces: each one is 48x48 grayscale pixels and is labelled with one of the following expressions: anger, disgust, fear, happy, neutral, sad, surprise.<p><p>This was used to train a resnet34 model."
12
  examples = ["angryExample.jpg", "disgustExample.jpg", "fearExample.jpg", "happyExample.jpg", "neutralExample.jpg", "sadExample.jpg", "surpriseExample.jpg"]
13
# (removed by this commit) the Interface was previously built without the
# interpretation='default' option:
- iface = gr.Interface(fn=predict, inputs=gr.inputs.Image(shape=(48,48)), outputs=gr.outputs.Label(num_top_classes=3), examples=examples, title=title, description=description)
14
  iface.launch()
15
 
 
 
import gradio as gr
from fastai.vision.all import *
import cv2

# Load the exported fastai learner; its vocab holds the expression labels.
learn = load_learner('model-v4.pkl')
labels = learn.dls.vocab

def predict(img):
    """Classify a face image and return {label: probability} for every class.

    The model was trained on 48x48 grayscale FER-2013 images, so the input
    is converted to grayscale before prediction. Gradio supplies images as
    RGB numpy arrays, hence COLOR_RGB2GRAY — the original COLOR_BGR2GRAY
    applied the luma weights to the wrong channels. The single-channel
    result is then expanded back to 3 identical channels for the resnet34
    learner: cvtColor forces a 1-channel destination for *2GRAY codes
    (the original `dstCn=3` had no effect), so the expansion must be an
    explicit second conversion.
    """
    gray = cv2.cvtColor(img, cv2.COLOR_RGB2GRAY)
    image = cv2.cvtColor(gray, cv2.COLOR_GRAY2RGB)
    pred, pred_idx, probs = learn.predict(image)
    return {labels[i]: float(probs[i]) for i in range(len(labels))}

title = "Facial Expression Classifier"
description = "A facial expression classifier, trained using the <a href='https://www.kaggle.com/datasets/msambare/fer2013'>FER-2013 dataset</a>. This dataset consists of 28,709 examples of faces: each one is 48x48 grayscale pixels and is labelled with one of the following expressions: anger, disgust, fear, happy, neutral, sad, surprise.<p><p>This was used to train a resnet34 model."
examples = ["angryExample.jpg", "disgustExample.jpg", "fearExample.jpg", "happyExample.jpg", "neutralExample.jpg", "sadExample.jpg", "surpriseExample.jpg"]
iface = gr.Interface(fn=predict, inputs=gr.inputs.Image(shape=(48,48)), outputs=gr.outputs.Label(num_top_classes=3), examples=examples, title=title, description=description, interpretation='default')
iface.launch()
17
 
packages.txt ADDED
@@ -0,0 +1 @@
 
 
1
+ python3-opencv
requirements.txt CHANGED
@@ -1 +1,2 @@
1
  fastai
 
 
1
  fastai
2
+ opencv-python