linhcuem committed
Commit 1db4b40 · 1 Parent(s): 8723b9c

Create app.py

Files changed (1)
  1. app.py +53 -0
app.py ADDED
@@ -0,0 +1,53 @@
import json
import os

import gradio as gr
import yolov5
from PIL import Image
from huggingface_hub import hf_hub_download


app_title = "Detect defects in bird nest jar"
models_ids = ['linhcuem/yolov5m_loi_kimloai']

current_model_id = models_ids[-1]
model = yolov5.load(current_model_id)
# model_yolov8 = YOLO(DEFAULT_DET_MODEL_ID_yolov8)

examples = [
    ['test_images/16848942292986850-a2A2448-23gmBAS_40174045.jpg', 0.25, 'linhcuem/yolov5m_loi_kimloai'],
    ['test_images/16848942702263890-a2A2448-23gmBAS_40174045.jpg', 0.25, 'linhcuem/yolov5m_loi_kimloai'],
    ['test_images/16848948502951770-a2A2448-23gmBAS_40174045.jpg', 0.25, 'linhcuem/yolov5m_loi_kimloai'],
]


def predict(image, threshold=0.25, model_id=None):
    # Reload the model if a different checkpoint was selected in the UI.
    global current_model_id
    global model
    if model_id != current_model_id:
        model = yolov5.load(model_id)
        # model_yolov8 = YOLO(DEFAULT_DET_MODEL_ID_yolov8)
        current_model_id = model_id

    # Read the model's expected input size from its config.json on the Hub.
    config_path = hf_hub_download(repo_id=model_id, filename="config.json")
    with open(config_path, "r") as f:
        config = json.load(f)
    input_size = config["input_size"]

    # Run inference and render the detections onto the image.
    model.conf = threshold
    results = model(image, size=input_size)
    numpy_image = results.render()[0]
    output_image = Image.fromarray(numpy_image)
    return output_image


gr.Interface(
    title=app_title,
    description="DO ANH DAT",
    fn=predict,
    inputs=[
        gr.Image(type="pil"),
        gr.Slider(maximum=1, step=0.01, value=0.25),
        gr.Dropdown(models_ids, value=models_ids[-1]),
    ],
    outputs=gr.Image(type="pil"),
    examples=examples,
    cache_examples=True if examples else False,
).launch(enable_queue=True)
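
For a quick check outside the Gradio UI, the predict function can also be called directly on one of the bundled test images. This is a minimal sketch, not part of the committed file; it assumes the repo's test_images/ folder is available locally and that the checkpoint downloads from the Hub succeed, and the output filename prediction.jpg is arbitrary.

# Local smoke test (assumes test_images/ exists and the Hub checkpoint is reachable).
from PIL import Image

img = Image.open('test_images/16848942292986850-a2A2448-23gmBAS_40174045.jpg')
out = predict(img, threshold=0.25, model_id='linhcuem/yolov5m_loi_kimloai')
out.save('prediction.jpg')  # rendered detections returned as a PIL image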