Update app.py
app.py CHANGED
@@ -1,1667 +1,2 @@
import os

import base64
import requests
import tempfile
import shutil
import time
import numpy as np
import traceback
from typing import List, Tuple
from datetime import datetime, timedelta
from pathlib import Path
from io import BytesIO
from urllib.parse import urljoin

# Selenium
from selenium import webdriver
from selenium.webdriver.support.ui import WebDriverWait
from selenium.webdriver.support import expected_conditions as EC
from selenium.webdriver.common.by import By
from selenium.common.exceptions import WebDriverException, TimeoutException

# Image processing
from PIL import Image, ImageDraw, ImageFont

# Gradio
import gradio as gr

# HuggingFace
from huggingface_hub import InferenceClient
from dotenv import load_dotenv

# HTML parsing
from bs4 import BeautifulSoup

# Audio and video processing
from gtts import gTTS
from moviepy.editor import (
    VideoFileClip,
    AudioFileClip,
    ImageClip,
    concatenate_videoclips
)
# Additional import
import textwrap

# Load environment variables from the .env file
load_dotenv()

# HuggingFace inference client setup
hf_client = InferenceClient(
    "CohereForAI/c4ai-command-r-plus-08-2024",
    token=os.getenv("HF_TOKEN")
)
# Screenshot cache directory setup
CACHE_DIR = Path("screenshot_cache")
CACHE_DIR.mkdir(exist_ok=True)

# Global screenshot cache
SCREENSHOT_CACHE = {}

def get_cached_screenshot(url: str) -> str:
    """Return a cached screenshot, or take a new one."""
    try:
        # Convert the URL into a filesystem-safe name
        safe_filename = base64.urlsafe_b64encode(url.encode()).decode()
        cache_file = CACHE_DIR / f"{safe_filename[:200]}.jpg"  # JPG instead of PNG

        if cache_file.exists():
            try:
                with Image.open(cache_file) as img:
                    buffered = BytesIO()
                    img.save(buffered, format="JPEG", quality=85, optimize=True)
                    return base64.b64encode(buffered.getvalue()).decode()
            except Exception as e:
                print(f"Cache read error for {url}: {e}")
                if cache_file.exists():
                    cache_file.unlink()

        return take_screenshot(url)

    except Exception as e:
        print(f"Screenshot cache error for {url}: {e}")
        return ""

def take_screenshot(url: str) -> str:
    """Take a screenshot of a website."""
    if not url.startswith('http'):
        url = f"https://{url}"

    options = webdriver.ChromeOptions()
    options.add_argument('--headless')
    options.add_argument('--no-sandbox')
    options.add_argument('--disable-dev-shm-usage')
    options.add_argument('--window-size=1080,720')

    driver = None
    try:
        driver = webdriver.Chrome(options=options)
        driver.get(url)

        # Wait for the page to load
        WebDriverWait(driver, 15).until(
            EC.presence_of_element_located((By.TAG_NAME, "body"))
        )

        # Extra settle time
        time.sleep(3)

        # Capture and optimize the screenshot
        screenshot = driver.get_screenshot_as_png()
        img = Image.open(BytesIO(screenshot))

        # Resize the image
        max_size = (800, 600)
        img.thumbnail(max_size, Image.Resampling.LANCZOS)

        # Convert to JPEG
        if img.mode in ('RGBA', 'LA'):
            background = Image.new('RGB', img.size, (255, 255, 255))
            background.paste(img, mask=img.split()[-1])
            img = background

        # Save to cache
        safe_filename = base64.urlsafe_b64encode(url.encode()).decode()
        cache_file = CACHE_DIR / f"{safe_filename[:200]}.jpg"
        img.save(cache_file, format="JPEG", quality=85, optimize=True)

        # Build the return image
        buffered = BytesIO()
        img.save(buffered, format="JPEG", quality=85, optimize=True)
        return base64.b64encode(buffered.getvalue()).decode()

    except Exception as e:
        print(f"Screenshot error for {url}: {e}")
        return ""

    finally:
        if driver:
            driver.quit()

def cleanup_cache():
    """Clean up the screenshot cache."""
    try:
        current_time = time.time()
        for cache_file in CACHE_DIR.glob("*.jpg"):
            try:
                # Delete files older than 24 hours or empty files
                if (current_time - cache_file.stat().st_mtime > 86400) or cache_file.stat().st_size == 0:
                    cache_file.unlink()
            except Exception as e:
                print(f"Error cleaning cache file {cache_file}: {e}")

    except Exception as e:
        print(f"Cache cleanup error: {e}")

# Clean the cache at app start
cleanup_cache()

def calculate_rising_rate(created_date: str, rank: int) -> int:
    """Calculate the AI Rising Rate."""
    # Score based on creation date
    created = datetime.strptime(created_date.split('T')[0], '%Y-%m-%d')
    today = datetime.now()
    days_diff = (today - created).days
    date_score = max(0, 300 - days_diff)  # up to 300 points

    # Score based on rank
    rank_score = max(0, 600 - rank)  # up to 600 points

    # Total score
    total_score = date_score + rank_score

    # Star count (0-5)
    if total_score <= 200:
        stars = 1
    elif total_score <= 400:
        stars = 2
    elif total_score <= 600:
        stars = 3
    elif total_score <= 800:
        stars = 4
    else:
        stars = 5

    return stars

def get_popularity_grade(likes: int, stars: int) -> tuple:
    """Calculate the AI Popularity Score grade."""
    # Base score from likes
    base_score = min(likes, 10000)  # up to 10000 points

    # Extra score from stars (1000 points per star)
    star_score = stars * 1000

    # Total
    total_score = base_score + star_score

    # Grade table (18 levels)
    grades = [
        (14500, "AAA+"), (14000, "AAA"), (13500, "AAA-"),
        (13000, "AA+"), (12500, "AA"), (12000, "AA-"),
        (11500, "A+"), (11000, "A"), (10000, "A-"),
        (9000, "BBB+"), (8000, "BBB"), (7000, "BBB-"),
        (6000, "BB+"), (5000, "BB"), (4000, "BB-"),
        (3000, "B+"), (2000, "B"), (1000, "B-")
    ]

    for threshold, grade in grades:
        if total_score >= threshold:
            return grade, total_score

    return "B-", total_score

215 |
-
# get_card ํจ์ ๋ด์ hardware_info ๋ถ๋ถ์ ๋ค์์ผ๋ก ๊ต์ฒด:
|
216 |
-
def get_rating_info(item: dict, index: int) -> str:
|
217 |
-
"""ํ๊ฐ ์ ๋ณด HTML ์์ฑ"""
|
218 |
-
created = item.get('createdAt', '').split('T')[0]
|
219 |
-
likes = int(str(item.get('likes', '0')).replace(',', ''))
|
220 |
-
|
221 |
-
# AI Rising Rate ๊ณ์ฐ
|
222 |
-
stars = calculate_rising_rate(created, index + 1)
|
223 |
-
star_html = "โ
" * stars + "โ" * (5 - stars) # ์ฑ์์ง ๋ณ๊ณผ ๋น ๋ณ ์กฐํฉ
|
224 |
-
|
225 |
-
# AI Popularity Score ๊ณ์ฐ
|
226 |
-
grade, score = get_popularity_grade(likes, stars)
|
227 |
-
|
228 |
-
# ๋ฑ๊ธ๋ณ ์์ ์ค์
|
229 |
-
grade_colors = {
|
230 |
-
'AAA': '#FFD700', 'AA': '#FFA500', 'A': '#FF4500',
|
231 |
-
'BBB': '#4169E1', 'BB': '#1E90FF', 'B': '#00BFFF'
|
232 |
-
}
|
233 |
-
grade_base = grade.rstrip('+-')
|
234 |
-
grade_color = grade_colors.get(grade_base, '#666666')
|
235 |
-
|
236 |
-
return f"""
|
237 |
-
<div style='
|
238 |
-
margin-top: 15px;
|
239 |
-
padding: 15px;
|
240 |
-
background: rgba(255,255,255,0.4);
|
241 |
-
border-radius: 10px;
|
242 |
-
font-size: 0.9em;
|
243 |
-
box-shadow: 0 2px 10px rgba(0,0,0,0.1);'>
|
244 |
-
<div style='
|
245 |
-
display: grid;
|
246 |
-
grid-template-columns: repeat(2, 1fr);
|
247 |
-
gap: 15px;'>
|
248 |
-
<div style='
|
249 |
-
color: #333;
|
250 |
-
display: flex;
|
251 |
-
flex-direction: column;
|
252 |
-
gap: 5px;'>
|
253 |
-
<span style='font-weight: bold;'>AI Rising Rate:</span>
|
254 |
-
<span style='
|
255 |
-
color: #FF8C00;
|
256 |
-
font-size: 1.4em;
|
257 |
-
letter-spacing: 2px;
|
258 |
-
text-shadow: 1px 1px 2px rgba(0,0,0,0.1);'>{star_html}</span>
|
259 |
-
</div>
|
260 |
-
<div style='
|
261 |
-
color: #333;
|
262 |
-
display: flex;
|
263 |
-
flex-direction: column;
|
264 |
-
gap: 5px;'>
|
265 |
-
<span style='font-weight: bold;'>AI Popularity Score:</span>
|
266 |
-
<span style='
|
267 |
-
font-size: 1.2em;
|
268 |
-
font-weight: bold;
|
269 |
-
color: {grade_color};
|
270 |
-
text-shadow: 1px 1px 2px rgba(0,0,0,0.1);'>{grade} ({score:,})</span>
|
271 |
-
</div>
|
272 |
-
</div>
|
273 |
-
</div>
|
274 |
-
"""
|
275 |
-
|
276 |
-
def get_hardware_info(item: dict) -> tuple:
|
277 |
-
"""ํ๋์จ์ด ์ ๋ณด ์ถ์ถ"""
|
278 |
-
try:
|
279 |
-
# runtime ์ ๋ณด ํ์ธ
|
280 |
-
runtime = item.get('runtime', {})
|
281 |
-
|
282 |
-
# CPU ์ ๋ณด ์ฒ๋ฆฌ
|
283 |
-
cpu_info = runtime.get('cpu', 'Standard')
|
284 |
-
|
285 |
-
# GPU ์ ๋ณด ์ฒ๋ฆฌ
|
286 |
-
gpu_info = "None"
|
287 |
-
if runtime.get('accelerator') == "gpu":
|
288 |
-
gpu_type = runtime.get('gpu', {}).get('name', '')
|
289 |
-
gpu_memory = runtime.get('gpu', {}).get('memory', '')
|
290 |
-
if gpu_type:
|
291 |
-
gpu_info = f"{gpu_type}"
|
292 |
-
if gpu_memory:
|
293 |
-
gpu_info += f" ({gpu_memory}GB)"
|
294 |
-
|
295 |
-
# spaces decorator ํ์ธ
|
296 |
-
if '@spaces.GPU' in str(item.get('sdk_version', '')):
|
297 |
-
if gpu_info == "None":
|
298 |
-
gpu_info = "GPU Enabled"
|
299 |
-
|
300 |
-
# SDK ์ ๋ณด ์ฒ๋ฆฌ
|
301 |
-
sdk = item.get('sdk', 'N/A')
|
302 |
-
|
303 |
-
print(f"Debug - Runtime Info: {runtime}") # ๋๋ฒ๊ทธ ์ถ๋ ฅ
|
304 |
-
print(f"Debug - GPU Info: {gpu_info}") # ๋๋ฒ๊ทธ ์ถ๋ ฅ
|
305 |
-
|
306 |
-
return cpu_info, gpu_info, sdk
|
307 |
-
|
308 |
-
except Exception as e:
|
309 |
-
print(f"Error parsing hardware info: {str(e)}")
|
310 |
-
return 'Standard', 'None', 'N/A'
|
311 |
-
|
312 |
-
def get_card(item: dict, index: int, card_type: str = "space") -> str:
|
313 |
-
"""ํตํฉ ์นด๋ HTML ์์ฑ"""
|
314 |
-
item_id = item.get('id', '')
|
315 |
-
author, title = item_id.split('/', 1)
|
316 |
-
likes = format(item.get('likes', 0), ',')
|
317 |
-
created = item.get('createdAt', '').split('T')[0]
|
318 |
-
|
319 |
-
# short_description ๊ฐ์ ธ์ค๊ธฐ
|
320 |
-
short_description = item.get('cardData', {}).get('short_description', '')
|
321 |
-
|
322 |
-
# URL ์ ์
|
323 |
-
if card_type == "space":
|
324 |
-
url = f"https://huggingface.co/spaces/{item_id}"
|
325 |
-
elif card_type == "model":
|
326 |
-
url = f"https://huggingface.co/{item_id}"
|
327 |
-
else: # dataset
|
328 |
-
url = f"https://huggingface.co/datasets/{item_id}"
|
329 |
-
|
330 |
-
# ๋ฉํ๋ฐ์ดํฐ ์ฒ๋ฆฌ
|
331 |
-
tags = item.get('tags', [])
|
332 |
-
pipeline_tag = item.get('pipeline_tag', '')
|
333 |
-
license = item.get('license', '')
|
334 |
-
sdk = item.get('sdk', 'N/A')
|
335 |
-
|
336 |
-
# AI Rating ์ ๋ณด ๊ฐ์ ธ์ค๊ธฐ
|
337 |
-
rating_info = get_rating_info(item, index)
|
338 |
-
|
339 |
-
# ์นด๋ ํ์
๋ณ ๊ทธ๋ผ๋ฐ์ด์
์ค์
|
340 |
-
if card_type == "space":
|
341 |
-
gradient_colors = """
|
342 |
-
rgba(255, 182, 193, 0.7), /* ํ์คํ
ํํฌ */
|
343 |
-
rgba(173, 216, 230, 0.7), /* ํ์คํ
๋ธ๋ฃจ */
|
344 |
-
rgba(255, 218, 185, 0.7) /* ํ์คํ
ํผ์น */
|
345 |
-
"""
|
346 |
-
bg_content = f"""
|
347 |
-
background-image: url(data:image/png;base64,{get_cached_screenshot(url) if get_cached_screenshot(url) else ''});
|
348 |
-
background-size: cover;
|
349 |
-
background-position: center;
|
350 |
-
"""
|
351 |
-
type_icon = "๐ฏ"
|
352 |
-
type_label = "SPACE"
|
353 |
-
elif card_type == "model":
|
354 |
-
gradient_colors = """
|
355 |
-
rgba(110, 142, 251, 0.7), /* ๋ชจ๋ธ ๋ธ๋ฃจ */
|
356 |
-
rgba(130, 158, 251, 0.7),
|
357 |
-
rgba(150, 174, 251, 0.7)
|
358 |
-
"""
|
359 |
-
bg_content = f"""
|
360 |
-
background: linear-gradient(135deg, #6e8efb, #4a6cf7);
|
361 |
-
padding: 15px;
|
362 |
-
"""
|
363 |
-
type_icon = "๐ค"
|
364 |
-
type_label = "MODEL"
|
365 |
-
else: # dataset
|
366 |
-
gradient_colors = """
|
367 |
-
rgba(255, 107, 107, 0.7), /* ๋ฐ์ดํฐ์
๋ ๋ */
|
368 |
-
rgba(255, 127, 127, 0.7),
|
369 |
-
rgba(255, 147, 147, 0.7)
|
370 |
-
"""
|
371 |
-
bg_content = f"""
|
372 |
-
background: linear-gradient(135deg, #ff6b6b, #ff8787);
|
373 |
-
padding: 15px;
|
374 |
-
"""
|
375 |
-
type_icon = "๐"
|
376 |
-
type_label = "DATASET"
|
377 |
-
|
378 |
-
content_bg = f"""
|
379 |
-
background: linear-gradient(135deg, {gradient_colors});
|
380 |
-
backdrop-filter: blur(10px);
|
381 |
-
"""
|
382 |
-
|
383 |
-
# ํ๊ทธ ํ์ (models์ datasets์ฉ)
|
384 |
-
tags_html = ""
|
385 |
-
if card_type != "space":
|
386 |
-
tags_html = f"""
|
387 |
-
<div style='
|
388 |
-
position: absolute;
|
389 |
-
top: 50%;
|
390 |
-
left: 50%;
|
391 |
-
transform: translate(-50%, -50%);
|
392 |
-
display: flex;
|
393 |
-
flex-wrap: wrap;
|
394 |
-
gap: 5px;
|
395 |
-
justify-content: center;
|
396 |
-
width: 90%;'>
|
397 |
-
{' '.join([f'''
|
398 |
-
<span style='
|
399 |
-
background: rgba(255,255,255,0.2);
|
400 |
-
padding: 5px 10px;
|
401 |
-
border-radius: 15px;
|
402 |
-
color: white;
|
403 |
-
font-size: 0.8em;'>
|
404 |
-
#{tag}
|
405 |
-
</span>
|
406 |
-
''' for tag in tags[:5]])}
|
407 |
-
</div>
|
408 |
-
"""
|
409 |
-
|
410 |
-
# ์นด๋ HTML ๋ฐํ
|
411 |
-
return f"""
|
412 |
-
<div class="card" style='
|
413 |
-
position: relative;
|
414 |
-
border: none;
|
415 |
-
padding: 0;
|
416 |
-
margin: 10px;
|
417 |
-
border-radius: 20px;
|
418 |
-
box-shadow: 0 10px 20px rgba(0,0,0,0.1);
|
419 |
-
background: white;
|
420 |
-
transition: all 0.3s ease;
|
421 |
-
overflow: hidden;
|
422 |
-
min-height: 400px;
|
423 |
-
cursor: pointer;
|
424 |
-
transform-origin: center;'
|
425 |
-
onmouseover="this.style.transform='scale(0.98) translateY(5px)'; this.style.boxShadow='0 5px 15px rgba(0,0,0,0.2)';"
|
426 |
-
onmouseout="this.style.transform='scale(1) translateY(0)'; this.style.boxShadow='0 10px 20px rgba(0,0,0,0.1)';"
|
427 |
-
onclick="window.open('{url}', '_blank')">
|
428 |
-
|
429 |
-
<!-- ์๋จ ์์ญ -->
|
430 |
-
<div style='
|
431 |
-
width: 100%;
|
432 |
-
height: 200px;
|
433 |
-
{bg_content}
|
434 |
-
position: relative;'>
|
435 |
-
|
436 |
-
<!-- ์์ ๋ฑ์ง -->
|
437 |
-
<div style='
|
438 |
-
position: absolute;
|
439 |
-
top: 10px;
|
440 |
-
left: 10px;
|
441 |
-
background: rgba(0,0,0,0.7);
|
442 |
-
color: white;
|
443 |
-
padding: 5px 15px;
|
444 |
-
border-radius: 20px;
|
445 |
-
font-weight: bold;
|
446 |
-
font-size: 0.9em;
|
447 |
-
backdrop-filter: blur(5px);'>
|
448 |
-
#{index + 1}
|
449 |
-
</div>
|
450 |
-
|
451 |
-
<!-- ํ์
๋ฑ์ง -->
|
452 |
-
<div style='
|
453 |
-
position: absolute;
|
454 |
-
top: 10px;
|
455 |
-
right: 10px;
|
456 |
-
background: rgba(255,255,255,0.9);
|
457 |
-
padding: 5px 15px;
|
458 |
-
border-radius: 20px;
|
459 |
-
font-weight: bold;
|
460 |
-
font-size: 0.8em;'>
|
461 |
-
{type_icon} {type_label}
|
462 |
-
</div>
|
463 |
-
|
464 |
-
{tags_html}
|
465 |
-
</div>
|
466 |
-
|
467 |
-
<!-- ์ฝํ
์ธ ์์ญ -->
|
468 |
-
<div style='
|
469 |
-
padding: 20px;
|
470 |
-
{content_bg}
|
471 |
-
border-radius: 0 0 20px 20px;
|
472 |
-
border-top: 1px solid rgba(255,255,255,0.5);'>
|
473 |
-
<h3 style='
|
474 |
-
margin: 0 0 15px 0;
|
475 |
-
color: #333;
|
476 |
-
font-size: 1.3em;
|
477 |
-
line-height: 1.4;
|
478 |
-
display: -webkit-box;
|
479 |
-
-webkit-line-clamp: 2;
|
480 |
-
-webkit-box-orient: vertical;
|
481 |
-
overflow: hidden;
|
482 |
-
text-overflow: ellipsis;
|
483 |
-
text-shadow: 1px 1px 1px rgba(255,255,255,0.8);'>
|
484 |
-
{title}
|
485 |
-
</h3>
|
486 |
-
|
487 |
-
{f'''
|
488 |
-
<!-- Short Description (Space ์นด๋์๋ง ํ์) -->
|
489 |
-
<div style='
|
490 |
-
margin: 0 0 15px 0;
|
491 |
-
color: #444;
|
492 |
-
font-size: 0.9em;
|
493 |
-
line-height: 1.5;
|
494 |
-
display: -webkit-box;
|
495 |
-
-webkit-line-clamp: 3;
|
496 |
-
-webkit-box-orient: vertical;
|
497 |
-
overflow: hidden;
|
498 |
-
text-overflow: ellipsis;
|
499 |
-
background: rgba(255,255,255,0.4);
|
500 |
-
padding: 10px;
|
501 |
-
border-radius: 8px;'>
|
502 |
-
{short_description}
|
503 |
-
</div>
|
504 |
-
''' if card_type == "space" and short_description else ''}
|
505 |
-
|
506 |
-
<div style='
|
507 |
-
display: grid;
|
508 |
-
grid-template-columns: repeat(2, 1fr);
|
509 |
-
gap: 10px;
|
510 |
-
font-size: 0.9em;
|
511 |
-
background: rgba(255,255,255,0.3);
|
512 |
-
padding: 10px;
|
513 |
-
border-radius: 10px;'>
|
514 |
-
<div style='color: #444;'>
|
515 |
-
<span style='margin-right: 5px;'>๐ค</span> {author}
|
516 |
-
</div>
|
517 |
-
<div style='color: #444;'>
|
518 |
-
<span style='margin-right: 5px;'>โค๏ธ</span> {likes}
|
519 |
-
</div>
|
520 |
-
<div style='color: #444; grid-column: span 2;'>
|
521 |
-
<span style='margin-right: 5px;'>๐
</span> {created}
|
522 |
-
</div>
|
523 |
-
</div>
|
524 |
-
|
525 |
-
{rating_info}
|
526 |
-
</div>
|
527 |
-
</div>
|
528 |
-
"""
|
529 |
-
|
530 |
-
def get_trending_spaces(search_query="", sort_by="rank", progress=gr.Progress()) -> Tuple[str, str]:
|
531 |
-
"""ํธ๋ ๋ฉ ์คํ์ด์ค ๊ฐ์ ธ์ค๊ธฐ"""
|
532 |
-
url = "https://huggingface.co/api/spaces"
|
533 |
-
|
534 |
-
try:
|
535 |
-
progress(0, desc="Fetching spaces data...")
|
536 |
-
params = {
|
537 |
-
'full': 'true',
|
538 |
-
'limit': 24
|
539 |
-
}
|
540 |
-
|
541 |
-
response = requests.get(url, params=params)
|
542 |
-
response.raise_for_status()
|
543 |
-
spaces = response.json()
|
544 |
-
|
545 |
-
# ๊ฒ์์ด๋ก ํํฐ๋ง
|
546 |
-
if search_query:
|
547 |
-
spaces = [space for space in spaces if search_query.lower() in
|
548 |
-
(space.get('id', '') + ' ' + space.get('title', '')).lower()]
|
549 |
-
|
550 |
-
# ์ ๋ ฌ
|
551 |
-
sort_by = sort_by.lower()
|
552 |
-
if sort_by == "rising_rate":
|
553 |
-
spaces.sort(key=lambda x: calculate_rising_rate(x.get('createdAt', ''), 0), reverse=True)
|
554 |
-
elif sort_by == "popularity":
|
555 |
-
spaces.sort(key=lambda x: get_popularity_grade(
|
556 |
-
int(str(x.get('likes', '0')).replace(',', '')),
|
557 |
-
calculate_rising_rate(x.get('createdAt', ''), 0))[1],
|
558 |
-
reverse=True)
|
559 |
-
|
560 |
-
progress(0.1, desc="Creating gallery...")
|
561 |
-
html_content = """
|
562 |
-
<div style='padding: 20px; background: #f5f5f5;'>
|
563 |
-
<div style='display: grid; grid-template-columns: repeat(auto-fill, minmax(300px, 1fr)); gap: 20px;'>
|
564 |
-
"""
|
565 |
-
|
566 |
-
for idx, space in enumerate(spaces):
|
567 |
-
html_content += get_card(space, idx, "space")
|
568 |
-
progress((0.1 + 0.9 * idx/len(spaces)), desc=f"Loading space {idx+1}/{len(spaces)}...")
|
569 |
-
|
570 |
-
html_content += "</div></div>"
|
571 |
-
|
572 |
-
progress(1.0, desc="Complete!")
|
573 |
-
return html_content, f"Found {len(spaces)} spaces"
|
574 |
-
|
575 |
-
except Exception as e:
|
576 |
-
error_html = f'<div style="color: red; padding: 20px;">Error: {str(e)}</div>'
|
577 |
-
return error_html, f"Error: {str(e)}"
|
578 |
-
|
579 |
-
def get_models(search_query="", sort_by="rank", progress=gr.Progress()) -> Tuple[str, str]:
|
580 |
-
"""์ธ๊ธฐ ๋ชจ๋ธ ๊ฐ์ ธ์ค๊ธฐ"""
|
581 |
-
url = "https://huggingface.co/api/models"
|
582 |
-
|
583 |
-
try:
|
584 |
-
progress(0, desc="Fetching models data...")
|
585 |
-
params = {
|
586 |
-
'full': 'true',
|
587 |
-
'limit': 300
|
588 |
-
}
|
589 |
-
response = requests.get(url, params=params)
|
590 |
-
response.raise_for_status()
|
591 |
-
models = response.json()
|
592 |
-
|
593 |
-
# ๊ฒ์์ด๋ก ํํฐ๋ง
|
594 |
-
if search_query:
|
595 |
-
models = [model for model in models if search_query.lower() in
|
596 |
-
(model.get('id', '') + ' ' + model.get('title', '')).lower()]
|
597 |
-
|
598 |
-
# ์ ๋ ฌ
|
599 |
-
sort_by = sort_by.lower()
|
600 |
-
if sort_by == "rising_rate":
|
601 |
-
models.sort(key=lambda x: calculate_rising_rate(x.get('createdAt', ''), 0), reverse=True)
|
602 |
-
elif sort_by == "popularity":
|
603 |
-
models.sort(key=lambda x: get_popularity_grade(
|
604 |
-
int(str(x.get('likes', '0')).replace(',', '')),
|
605 |
-
calculate_rising_rate(x.get('createdAt', ''), 0))[1],
|
606 |
-
reverse=True)
|
607 |
-
|
608 |
-
progress(0.1, desc="Creating gallery...")
|
609 |
-
html_content = """
|
610 |
-
<div style='padding: 20px; background: #f5f5f5;'>
|
611 |
-
<div style='display: grid; grid-template-columns: repeat(auto-fill, minmax(300px, 1fr)); gap: 20px;'>
|
612 |
-
"""
|
613 |
-
|
614 |
-
for idx, model in enumerate(models):
|
615 |
-
html_content += get_card(model, idx, "model")
|
616 |
-
progress((0.1 + 0.9 * idx/len(models)), desc=f"Loading model {idx+1}/{len(models)}...")
|
617 |
-
|
618 |
-
html_content += "</div></div>"
|
619 |
-
|
620 |
-
progress(1.0, desc="Complete!")
|
621 |
-
return html_content, f"Found {len(models)} models"
|
622 |
-
|
623 |
-
except Exception as e:
|
624 |
-
error_html = f'<div style="color: red; padding: 20px;">Error: {str(e)}</div>'
|
625 |
-
return error_html, f"Error: {str(e)}"
|
626 |
-
|
627 |
-
def get_datasets(search_query="", sort_by="rank", progress=gr.Progress()) -> Tuple[str, str]:
|
628 |
-
"""์ธ๊ธฐ ๋ฐ์ดํฐ์
๊ฐ์ ธ์ค๊ธฐ"""
|
629 |
-
url = "https://huggingface.co/api/datasets"
|
630 |
-
|
631 |
-
try:
|
632 |
-
progress(0, desc="Fetching datasets data...")
|
633 |
-
params = {
|
634 |
-
'full': 'true',
|
635 |
-
'limit': 300
|
636 |
-
}
|
637 |
-
response = requests.get(url, params=params)
|
638 |
-
response.raise_for_status()
|
639 |
-
datasets = response.json()
|
640 |
-
|
641 |
-
# ๊ฒ์์ด๋ก ํํฐ๋ง
|
642 |
-
if search_query:
|
643 |
-
datasets = [dataset for dataset in datasets if search_query.lower() in
|
644 |
-
(dataset.get('id', '') + ' ' + dataset.get('title', '')).lower()]
|
645 |
-
|
646 |
-
# ์ ๋ ฌ
|
647 |
-
sort_by = sort_by.lower()
|
648 |
-
if sort_by == "rising_rate":
|
649 |
-
datasets.sort(key=lambda x: calculate_rising_rate(x.get('createdAt', ''), 0), reverse=True)
|
650 |
-
elif sort_by == "popularity":
|
651 |
-
datasets.sort(key=lambda x: get_popularity_grade(
|
652 |
-
int(str(x.get('likes', '0')).replace(',', '')),
|
653 |
-
calculate_rising_rate(x.get('createdAt', ''), 0))[1],
|
654 |
-
reverse=True)
|
655 |
-
|
656 |
-
progress(0.1, desc="Creating gallery...")
|
657 |
-
html_content = """
|
658 |
-
<div style='padding: 20px; background: #f5f5f5;'>
|
659 |
-
<div style='display: grid; grid-template-columns: repeat(auto-fill, minmax(300px, 1fr)); gap: 20px;'>
|
660 |
-
"""
|
661 |
-
|
662 |
-
for idx, dataset in enumerate(datasets):
|
663 |
-
html_content += get_card(dataset, idx, "dataset")
|
664 |
-
progress((0.1 + 0.9 * idx/len(datasets)), desc=f"Loading dataset {idx+1}/{len(datasets)}...")
|
665 |
-
|
666 |
-
html_content += "</div></div>"
|
667 |
-
|
668 |
-
progress(1.0, desc="Complete!")
|
669 |
-
return html_content, f"Found {len(datasets)} datasets"
|
670 |
-
|
671 |
-
except Exception as e:
|
672 |
-
error_html = f'<div style="color: red; padding: 20px;">Error: {str(e)}</div>'
|
673 |
-
return error_html, f"Error: {str(e)}"
|
674 |
-
|
675 |
-
# ์ ๋ ฌ ํจ์ ์ถ๊ฐ
|
676 |
-
def sort_items(items, sort_by):
|
677 |
-
if sort_by == "rank":
|
678 |
-
return items # ์ด๋ฏธ ์์๋๋ก ์ ๋ ฌ๋์ด ์์
|
679 |
-
elif sort_by == "rising_rate":
|
680 |
-
return sorted(items, key=lambda x: calculate_rising_rate(x.get('createdAt', ''), 0), reverse=True)
|
681 |
-
elif sort_by == "popularity":
|
682 |
-
return sorted(items, key=lambda x: get_popularity_grade(int(str(x.get('likes', '0')).replace(',', '')),
|
683 |
-
calculate_rising_rate(x.get('createdAt', ''), 0))[1], reverse=True)
|
684 |
-
return items
|
685 |
-
|
686 |
-
# API ํธ์ถ ํจ์ ์์
|
687 |
-
def fetch_items(item_type, search_query="", sort_by="rank", limit=1000):
|
688 |
-
"""์์ดํ
๊ฐ์ ธ์ค๊ธฐ (spaces/models/datasets)"""
|
689 |
-
base_url = f"https://huggingface.co/api/{item_type}"
|
690 |
-
params = {
|
691 |
-
'full': 'true',
|
692 |
-
'limit': limit,
|
693 |
-
'search': search_query
|
694 |
-
}
|
695 |
-
|
696 |
-
try:
|
697 |
-
response = requests.get(base_url, params=params)
|
698 |
-
response.raise_for_status()
|
699 |
-
items = response.json()
|
700 |
-
|
701 |
-
# ๊ฒ์์ด๋ก ํํฐ๋ง
|
702 |
-
if search_query:
|
703 |
-
items = [item for item in items if search_query.lower() in
|
704 |
-
(item.get('id', '') + item.get('title', '')).lower()]
|
705 |
-
|
706 |
-
# ์ ๋ ฌ
|
707 |
-
items = sort_items(items, sort_by)
|
708 |
-
|
709 |
-
return items[:300] # ์์ 300๊ฐ๋ง ๋ฐํ
|
710 |
-
except Exception as e:
|
711 |
-
print(f"Error fetching items: {e}")
|
712 |
-
return []
|
713 |
-
|
714 |
-
|
715 |
-
def get_space_source(space_id: str) -> dict:
|
716 |
-
"""์คํ์ด์ค์ ์์ค์ฝ๋ ๊ฐ์ ธ์ค๊ธฐ"""
|
717 |
-
try:
|
718 |
-
headers = {"Authorization": f"Bearer {os.getenv('HF_TOKEN')}"}
|
719 |
-
files_to_try = [
|
720 |
-
'app.py',
|
721 |
-
'index.html',
|
722 |
-
'app.js',
|
723 |
-
'main.py',
|
724 |
-
'streamlit_app.py',
|
725 |
-
'gradio_ui.py'
|
726 |
-
]
|
727 |
-
|
728 |
-
source = {}
|
729 |
-
for file in files_to_try:
|
730 |
-
url = f"https://huggingface.co/spaces/{space_id}/raw/main/{file}"
|
731 |
-
response = requests.get(url, headers=headers)
|
732 |
-
if response.status_code == 200:
|
733 |
-
source[file] = response.text
|
734 |
-
|
735 |
-
return source if source else {"app.py": "", "index.html": ""}
|
736 |
-
|
737 |
-
except Exception as e:
|
738 |
-
print(f"Error fetching source for {space_id}: {str(e)}")
|
739 |
-
return {"app.py": "", "index.html": ""}
|
740 |
-
|
741 |
-
def analyze_spaces(progress=gr.Progress()):
|
742 |
-
"""์คํ์ด์ค ๋ถ์ ๋ฐ HTML ์์ฑ"""
|
743 |
-
try:
|
744 |
-
url = "https://huggingface.co/api/spaces"
|
745 |
-
response = requests.get(url, params={'full': 'true', 'limit': 24})
|
746 |
-
response.raise_for_status()
|
747 |
-
spaces = response.json()[:24]
|
748 |
-
|
749 |
-
html_content = "<div style='padding: 20px;'>"
|
750 |
-
|
751 |
-
for idx, space in enumerate(spaces):
|
752 |
-
progress((idx + 1) / 24, desc=f"๋ถ์ ์ค... {idx+1}/24")
|
753 |
-
try:
|
754 |
-
# ์คํฌ๋ฆฐ์ท ์ฒ๋ฆฌ
|
755 |
-
space_url = f"https://huggingface.co/spaces/{space['id']}"
|
756 |
-
screenshot_base64 = get_cached_screenshot(space_url)
|
757 |
-
|
758 |
-
# ํ
์คํธ ๋ถ์
|
759 |
-
project_name = space['id'].split('/')[-1]
|
760 |
-
source = get_space_source(space['id'])
|
761 |
-
source_code = source["app.py"] or source["index.html"]
|
762 |
-
|
763 |
-
prompt = f"""
|
764 |
-
๋ค์ ์คํ์ด์ค๋ฅผ ๊ฐ๋จํ ์ค๋ช
ํด์ฃผ์ธ์:
|
765 |
-
์คํ์ด์ค ์ด๋ฆ: {project_name}
|
766 |
-
์์: {idx + 1}์
|
767 |
-
|
768 |
-
๋ค์ ํ์์ผ๋ก ์์ฑ:
|
769 |
-
1. ์์์ ์ด๋ฆ ์๊ฐ
|
770 |
-
2. ์ฃผ์ ๊ธฐ๋ฅ ์ค๋ช
|
771 |
-
3. ํน์ง์ ์ธ ์ฅ์
|
772 |
-
"""
|
773 |
-
|
774 |
-
messages = [
|
775 |
-
{"role": "system", "content": "๊ฐ๋จ๋ช
๋ฃํ ์ค๋ช
์ ์ ๊ณตํ๋ ๋ฆฌํฌํฐ์
๋๋ค."},
|
776 |
-
{"role": "user", "content": prompt}
|
777 |
-
]
|
778 |
-
|
779 |
-
response = hf_client.chat_completion(
|
780 |
-
messages,
|
781 |
-
max_tokens=150,
|
782 |
-
temperature=0.7
|
783 |
-
)
|
784 |
-
|
785 |
-
analysis = response.choices[0].message.content.strip()
|
786 |
-
|
787 |
-
# HTML ์นด๋ ์์ฑ
|
788 |
-
html_content += f"""
|
789 |
-
<div style='
|
790 |
-
background: white;
|
791 |
-
border-radius: 10px;
|
792 |
-
padding: 20px;
|
793 |
-
margin-bottom: 20px;
|
794 |
-
box-shadow: 0 2px 4px rgba(0,0,0,0.1);'>
|
795 |
-
<div style='display: flex; gap: 20px;'>
|
796 |
-
<div style='flex: 2;'>
|
797 |
-
<h3>Space #{idx + 1}</h3>
|
798 |
-
<textarea style='
|
799 |
-
width: 100%;
|
800 |
-
min-height: 100px;
|
801 |
-
padding: 10px;
|
802 |
-
border: 1px solid #ddd;
|
803 |
-
border-radius: 5px;
|
804 |
-
margin-top: 10px;'
|
805 |
-
>{analysis}</textarea>
|
806 |
-
</div>
|
807 |
-
<div style='flex: 1;'>
|
808 |
-
{f'<img src="data:image/jpeg;base64,{screenshot_base64}" style="width: 100%; border-radius: 5px;">' if screenshot_base64 else ''}
|
809 |
-
</div>
|
810 |
-
</div>
|
811 |
-
</div>
|
812 |
-
"""
|
813 |
-
|
814 |
-
except Exception as e:
|
815 |
-
print(f"Error processing space {space['id']}: {e}")
|
816 |
-
html_content += f"""
|
817 |
-
<div style='
|
818 |
-
background: white;
|
819 |
-
border-radius: 10px;
|
820 |
-
padding: 20px;
|
821 |
-
margin-bottom: 20px;
|
822 |
-
box-shadow: 0 2px 4px rgba(0,0,0,0.1);'>
|
823 |
-
<h3>Space #{idx + 1}</h3>
|
824 |
-
<p>๋ถ์์ ์ค๋น์ค์
๋๋ค.</p>
|
825 |
-
</div>
|
826 |
-
"""
|
827 |
-
|
828 |
-
html_content += "</div>"
|
829 |
-
return html_content
|
830 |
-
|
831 |
-
except Exception as e:
|
832 |
-
print(f"Analysis error: {e}")
|
833 |
-
return "<div style='color: red; padding: 20px;'>๋ถ์ ์ค ์ค๋ฅ๊ฐ ๋ฐ์ํ์ต๋๋ค.</div>"
|
834 |
-
|
835 |
-
|
836 |
-
|
837 |
-
def analyze_top_spaces(progress=gr.Progress()) -> Tuple[str, str]:
|
838 |
-
"""์์ 24๊ฐ ์คํ์ด์ค ๋ถ์"""
|
839 |
-
try:
|
840 |
-
progress(0, desc="์คํ์ด์ค ๋ฐ์ดํฐ ๊ฐ์ ธ์ค๋ ์ค...")
|
841 |
-
|
842 |
-
url = "https://huggingface.co/api/spaces"
|
843 |
-
response = requests.get(url, params={'full': 'true', 'limit': 24})
|
844 |
-
response.raise_for_status()
|
845 |
-
spaces = response.json()[:24]
|
846 |
-
|
847 |
-
# ์๋จ ์
๋ ฅ ๋ฐ์ค์ ๊ธฐ๋ณธ ํ
์คํธ๋ฅผ ํฌํจํ HTML ์์
|
848 |
-
html_content = """
|
849 |
-
<div style='padding: 20px; background: #ffffff;'>
|
850 |
-
<div style='margin-bottom: 30px;'>
|
851 |
-
<textarea id='intro_text' rows='4' style='
|
852 |
-
width: 100%;
|
853 |
-
padding: 15px;
|
854 |
-
border: 1px solid #ddd;
|
855 |
-
border-radius: 10px;
|
856 |
-
font-size: 1.1em;
|
857 |
-
line-height: 1.5;
|
858 |
-
resize: vertical;
|
859 |
-
background: #f8f9fa;
|
860 |
-
'>์๋
ํ์ธ์. ๋งค์ผ ๊ธ๋ก๋ฒ ์ต์ AI ์ธ๊ธฐ ํธ๋ ๋ ์๋น์ค๋ฅผ ์์๋ณด๋ '๋ฐ์ผ๋ฆฌ AI ํธ๋ ๋ฉ' ๋ด์ค์
๋๋ค. ์ค๋์ ํ๊น
ํ์ด์ค ์ธ๊ธฐ ์์ 1์๋ถํฐ 24์๊น์ง, ๋ถ์๊ณผ ํต์ฌ ๋ด์ฉ์ ์ดํด๋ณด๊ฒ ์ต๋๋ค.</textarea>
|
861 |
-
</div>
|
862 |
-
<style>
|
863 |
-
.script-card {
|
864 |
-
background: white !important;
|
865 |
-
border-radius: 10px;
|
866 |
-
padding: 20px;
|
867 |
-
margin-bottom: 20px;
|
868 |
-
box-shadow: 0 2px 4px rgba(0,0,0,0.1);
|
869 |
-
border: 1px solid #e0e0e0;
|
870 |
-
}
|
871 |
-
.script-content {
|
872 |
-
color: #444 !important;
|
873 |
-
font-size: 1.1em;
|
874 |
-
line-height: 1.6;
|
875 |
-
white-space: pre-line;
|
876 |
-
}
|
877 |
-
</style>
|
878 |
-
"""
|
879 |
-
|
880 |
-
for idx, space in enumerate(spaces):
|
881 |
-
progress((idx + 1) / 24, desc=f"๋ถ์ ์ค... {idx+1}/24")
|
882 |
-
|
883 |
-
try:
|
884 |
-
source = get_space_source(space['id'])
|
885 |
-
source_code = source["app.py"] or source["index.html"]
|
886 |
-
|
887 |
-
# ์คํ์ด์ค ID์์ ์ฌ์ฉ์๋ช
์ ๊ฑฐํ๊ณ ํ๋ก์ ํธ๋ช
๋ง ์ถ์ถ
|
888 |
-
project_name = space['id'].split('/')[-1]
|
889 |
-
|
890 |
-
prompt = f"""
|
891 |
-
๋ค์ HuggingFace ์คํ์ด์ค๋ฅผ ์ ํ๋ธ ๋ด์ค ๋ฆฌํฌํธ ํ์์ผ๋ก ์ค๋ช
ํด์ฃผ์ธ์.
|
892 |
-
์์์ ๋ฐ๋์ "์ค๋์ ์ธ๊ธฐ์์ {idx + 1}์์ธ {project_name}์
๋๋ค."๋ก ์์ํ๊ณ ,
|
893 |
-
์ด์ด์ ์ฃผ์ ๊ธฐ๋ฅ, ํน์ง, ํ์ฉ๋ฐฉ์์ 2-3๋ฌธ์ฅ์ผ๋ก ์์ฐ์ค๋ฝ๊ฒ ์ค๋ช
ํด์ฃผ์ธ์.
|
894 |
-
์ ์ฒด ๊ธธ์ด๋ 3-4๋ฌธ์ฅ์ผ๋ก ์ ํํ๊ณ , ์ค๋ช
์ ๋ด์ค ๋ฆฌํฌํฐ์ฒ๋ผ ๋ช
ํํ๊ณ ์ ๋ฌธ์ ์ผ๋ก ํด์ฃผ์ธ์.
|
895 |
-
|
896 |
-
์์ค์ฝ๋:
|
897 |
-
```
|
898 |
-
{source_code[:1500]}
|
899 |
-
```
|
900 |
-
"""
|
901 |
-
|
902 |
-
messages = [
|
903 |
-
{"role": "system", "content": "AI ๊ธฐ์ ์ ๋ฌธ ๋ด์ค ๋ฆฌํฌํฐ์
๋๋ค."},
|
904 |
-
{"role": "user", "content": prompt}
|
905 |
-
]
|
906 |
-
|
907 |
-
response = hf_client.chat_completion(
|
908 |
-
messages,
|
909 |
-
max_tokens=2000,
|
910 |
-
temperature=0.7
|
911 |
-
)
|
912 |
-
|
913 |
-
script = response.choices[0].message.content.strip()
|
914 |
-
|
915 |
-
html_content += f"""
|
916 |
-
<div class='script-card'>
|
917 |
-
<div class='script-content'>{script}</div>
|
918 |
-
</div>
|
919 |
-
"""
|
920 |
-
|
921 |
-
except Exception as e:
|
922 |
-
print(f"Error analyzing space {space['id']}: {e}")
|
923 |
-
html_content += f"""
|
924 |
-
<div class='script-card'>
|
925 |
-
<div class='script-content' style='color: red !important;'>
|
926 |
-
์์ {idx + 1}์ ๋ถ์ ์ค ์ค๋ฅ๊ฐ ๋ฐ์ํ์ต๋๋ค.
|
927 |
-
</div>
|
928 |
-
</div>
|
929 |
-
"""
|
930 |
-
|
931 |
-
html_content += "</div>"
|
932 |
-
return html_content, f"24๊ฐ ์คํ์ด์ค ๋ถ์ ์๋ฃ"
|
933 |
-
|
934 |
-
except Exception as e:
|
935 |
-
error_msg = f"Error: {str(e)}"
|
936 |
-
return f"<div style='color: red; padding: 20px;'>{error_msg}</div>", error_msg
|
937 |
-
|
938 |
-
|
939 |
-
def analyze_single_space(space: dict, source_code: str) -> str:
|
940 |
-
"""๋จ์ผ ์คํ์ด์ค ๋ถ์"""
|
941 |
-
try:
|
942 |
-
if not source_code:
|
943 |
-
return "์์ค์ฝ๋๋ฅผ ๊ฐ์ ธ์ฌ ์ ์์ต๋๋ค."
|
944 |
-
|
945 |
-
prompt = f"""
|
946 |
-
๋ค์ ์คํ์ด์ค์ ์์ค์ฝ๋๋ฅผ ๋ถ์ํด์ฃผ์ธ์:
|
947 |
-
|
948 |
-
```
|
949 |
-
{source_code[:4000]}
|
950 |
-
```
|
951 |
-
|
952 |
-
๋ค์ ํญ๋ชฉ์ ๊ฐ๊ฐ ํ ์ค๋ก ์์ฝํด์ฃผ์ธ์:
|
953 |
-
1. ๊ฐ์:
|
954 |
-
2. ์์ฝ:
|
955 |
-
3. ํน์ง ๋ฐ ์ฅ์ :
|
956 |
-
4. ์ฌ์ฉ ๋์:
|
957 |
-
5. ์ฌ์ฉ ๋ฐฉ๋ฒ:
|
958 |
-
6. ์ ์ฌ ์๋น์ค์์ ์ฐจ๋ณ์ :
|
959 |
-
"""
|
960 |
-
|
961 |
-
messages = [
|
962 |
-
{"role": "system", "content": "์์ค์ฝ๋ ๋ถ์ ์ ๋ฌธ๊ฐ์
๋๋ค."},
|
963 |
-
{"role": "user", "content": prompt}
|
964 |
-
]
|
965 |
-
|
966 |
-
response = hf_client.chat_completion(
|
967 |
-
messages,
|
968 |
-
max_tokens=3900,
|
969 |
-
temperature=0.3
|
970 |
-
)
|
971 |
-
|
972 |
-
return response.choices[0].message.content
|
973 |
-
|
974 |
-
except Exception as e:
|
975 |
-
return f"๋ถ์ ์ค ์ค๋ฅ ๋ฐ์: {str(e)}"
|
976 |
-
|
977 |
-
|
978 |
-
|
979 |
-
def create_editable_space_analysis(progress=gr.Progress()) -> List[str]:
|
980 |
-
"""24๊ฐ ์คํ์ด์ค ๋ถ์ ํ
์คํธ ์์ฑ"""
|
981 |
-
try:
|
982 |
-
progress(0, desc="์คํ์ด์ค ๋ฐ์ดํฐ ๊ฐ์ ธ์ค๋ ์ค...")
|
983 |
-
|
984 |
-
url = "https://huggingface.co/api/spaces"
|
985 |
-
response = requests.get(url, params={'full': 'true', 'limit': 24})
|
986 |
-
response.raise_for_status()
|
987 |
-
spaces = response.json()[:24]
|
988 |
-
|
989 |
-
analysis_texts = []
|
990 |
-
|
991 |
-
for idx, space in enumerate(spaces):
|
992 |
-
progress((idx + 1) / 24, desc=f"๋ถ์ ์ค... {idx+1}/24")
|
993 |
-
try:
|
994 |
-
source = get_space_source(space['id'])
|
995 |
-
source_code = source["app.py"] or source["index.html"]
|
996 |
-
|
997 |
-
# ํ๋ก์ ํธ๋ช
๋ง ์ถ์ถ
|
998 |
-
project_name = space['id'].split('/')[-1]
|
999 |
-
|
1000 |
-
prompt = f"""
|
1001 |
-
๋ค์ HuggingFace ์คํ์ด์ค๋ฅผ ๋ถ์ํ์ฌ ๋ด์ค ๋ฆฌํฌํธ ํ์์ผ๋ก ์ค๋ช
ํด์ฃผ์ธ์:
|
1002 |
-
์์์ ๋ฐ๋์ "์ค๋์ ์ธ๊ธฐ์์ {idx + 1}์์ธ {project_name}์
๋๋ค."๋ก ์์ํ๊ณ ,
|
1003 |
-
์ด์ด์ ์ฃผ์ ๊ธฐ๋ฅ, ํน์ง, ํ์ฉ๋ฐฉ์์ ์์ฐ์ค๋ฝ๊ฒ ์ค๋ช
ํด์ฃผ์ธ์.
|
1004 |
-
|
1005 |
-
์์ค์ฝ๋:
|
1006 |
-
```
|
1007 |
-
{source_code[:1500]}
|
1008 |
-
```
|
1009 |
-
"""
|
1010 |
-
|
1011 |
-
messages = [
|
1012 |
-
{"role": "system", "content": "AI ๊ธฐ์ ์ ๋ฌธ ๋ด์ค ๋ฆฌํฌํฐ์
๋๋ค."},
|
1013 |
-
{"role": "user", "content": prompt}
|
1014 |
-
]
|
1015 |
-
|
1016 |
-
response = hf_client.chat_completion(
|
1017 |
-
messages,
|
1018 |
-
max_tokens=2000,
|
1019 |
-
temperature=0.7
|
1020 |
-
)
|
1021 |
-
|
1022 |
-
analysis_texts.append(response.choices[0].message.content.strip())
|
1023 |
-
|
1024 |
-
except Exception as e:
|
1025 |
-
analysis_texts.append(f"์ค๋์ ์ธ๊ธฐ์์ {idx + 1}์์ธ {project_name}์
๋๋ค.")
|
1026 |
-
|
1027 |
-
return analysis_texts
|
1028 |
-
|
1029 |
-
except Exception as e:
|
1030 |
-
return [f"์์ {i+1}์ ๋ถ์์ ์ค๋น์ค์
๋๋ค." for i in range(24)]
|
1031 |
-
|
1032 |
-
def generate_video(intro_text, analysis_html):
|
1033 |
-
"""๋น๋์ค ์์ฑ"""
|
1034 |
-
try:
|
1035 |
-
# HTML์์ ํ
์คํธ ์ถ์ถ
|
1036 |
-
soup = BeautifulSoup(analysis_html, 'html.parser')
|
1037 |
-
texts = [intro_text] + [p.text for p in soup.find_all('p')]
|
1038 |
-
|
1039 |
-
temp_dir = tempfile.mkdtemp()
|
1040 |
-
clips = []
|
1041 |
-
|
1042 |
-
# ๊ฐ ํ
์คํธ์ ๋ํ ํด๋ฆฝ ์์ฑ
|
1043 |
-
for idx, text in enumerate(texts):
|
1044 |
-
if not text or len(text.strip()) == 0:
|
1045 |
-
continue
|
1046 |
-
|
1047 |
-
# ์ด๋ฏธ์ง ์์ฑ
|
1048 |
-
img = Image.new('RGB', (800, 600), (0, 0, 0))
|
1049 |
-
draw = ImageDraw.Draw(img)
|
1050 |
-
|
1051 |
-
# ํ
์คํธ๋ฅผ ์ฌ๋ฌ ์ค๋ก ๋๋์ด ๊ทธ๋ฆฌ๊ธฐ
|
1052 |
-
lines = textwrap.wrap(text, width=40)
|
1053 |
-
y = 40
|
1054 |
-
for line in lines:
|
1055 |
-
draw.text((40, y), line, fill=(255, 255, 255))
|
1056 |
-
y += 30
|
1057 |
-
|
1058 |
-
# ์์ฑ ์์ฑ
|
1059 |
-
tts = gTTS(text=text, lang='ko', slow=False)
|
1060 |
-
audio_path = os.path.join(temp_dir, f"audio_{idx}.mp3")
|
1061 |
-
tts.save(audio_path)
|
1062 |
-
audio_clip = AudioFileClip(audio_path)
|
1063 |
-
|
1064 |
-
# ํด๋ฆฝ ์์ฑ
|
1065 |
-
video_clip = ImageClip(np.array(img))
|
1066 |
-
video_clip = video_clip.set_duration(audio_clip.duration)
|
1067 |
-
video_clip = video_clip.set_audio(audio_clip)
|
1068 |
-
clips.append(video_clip)
|
1069 |
-
|
1070 |
-
# ์ต์ข
์์ ์์ฑ
|
1071 |
-
final_clip = concatenate_videoclips(clips)
|
1072 |
-
output_path = "output_video.mp4"
|
1073 |
-
final_clip.write_videofile(
|
1074 |
-
output_path,
|
1075 |
-
fps=24,
|
1076 |
-
codec='libx264',
|
1077 |
-
audio_codec='aac'
|
1078 |
-
)
|
1079 |
-
|
1080 |
-
return output_path
|
1081 |
-
|
1082 |
-
except Exception as e:
|
1083 |
-
print(f"Video generation error: {e}")
|
1084 |
-
traceback.print_exc()
|
1085 |
-
return None
|
1086 |
-
|
1087 |
-
def create_interface():
|
1088 |
-
with gr.Blocks(title="HuggingFace Trending Board", css="""
|
1089 |
-
.search-sort-container {
|
1090 |
-
background: linear-gradient(135deg, rgba(255,255,255,0.95), rgba(240,240,255,0.95));
|
1091 |
-
border-radius: 15px;
|
1092 |
-
padding: 20px;
|
1093 |
-
margin: 10px 0;
|
1094 |
-
box-shadow: 0 4px 6px rgba(0,0,0,0.1);
|
1095 |
-
overflow: visible;
|
1096 |
-
}
|
1097 |
-
.search-box {
|
1098 |
-
border: 2px solid #e1e1e1;
|
1099 |
-
border-radius: 10px;
|
1100 |
-
padding: 12px;
|
1101 |
-
transition: all 0.3s ease;
|
1102 |
-
background: linear-gradient(135deg, #ffffff, #f8f9ff);
|
1103 |
-
width: 100%;
|
1104 |
-
}
|
1105 |
-
.search-box:focus {
|
1106 |
-
border-color: #7b61ff;
|
1107 |
-
box-shadow: 0 0 0 2px rgba(123,97,255,0.2);
|
1108 |
-
background: linear-gradient(135deg, #ffffff, #f0f3ff);
|
1109 |
-
}
|
1110 |
-
.refresh-btn {
|
1111 |
-
background: linear-gradient(135deg, #7b61ff, #6366f1);
|
1112 |
-
color: white;
|
1113 |
-
border: none;
|
1114 |
-
padding: 10px 20px;
|
1115 |
-
border-radius: 10px;
|
1116 |
-
cursor: pointer;
|
1117 |
-
transition: all 0.3s ease;
|
1118 |
-
width: 120px;
|
1119 |
-
height: 80px !important;
|
1120 |
-
display: flex;
|
1121 |
-
align-items: center;
|
1122 |
-
justify-content: center;
|
1123 |
-
margin-left: auto;
|
1124 |
-
font-size: 1.2em !important;
|
1125 |
-
box-shadow: 0 4px 6px rgba(0,0,0,0.1);
|
1126 |
-
}
|
1127 |
-
.refresh-btn:hover {
|
1128 |
-
transform: translateY(-2px);
|
1129 |
-
box-shadow: 0 6px 12px rgba(0,0,0,0.2);
|
1130 |
-
background: linear-gradient(135deg, #8b71ff, #7376f1);
|
1131 |
-
}
|
1132 |
-
""") as interface:
|
1133 |
-
|
1134 |
-
gr.Markdown("""
|
1135 |
-
# ๐ค HuggingFace Trending 24 NEWS
|
1136 |
-
<div style='margin-bottom: 20px; padding: 10px; background: linear-gradient(135deg, rgba(123,97,255,0.1), rgba(99,102,241,0.1)); border-radius: 10px;'>
|
1137 |
-
HuggingFace Trending Spaces Top 24 NEWS
|
1138 |
-
</div>
|
1139 |
-
""")
|
1140 |
-
|
1141 |
-
with gr.Tabs() as tabs:
|
1142 |
-
# Spaces ํญ
|
1143 |
-
with gr.Tab("๐ฏ Trending Spaces"):
|
1144 |
-
with gr.Row(elem_classes="search-sort-container"):
|
1145 |
-
with gr.Column(scale=2):
|
1146 |
-
spaces_search = gr.Textbox(
|
1147 |
-
label="๐ Search Spaces",
|
1148 |
-
placeholder="Enter keywords to search...",
|
1149 |
-
elem_classes="search-box"
|
1150 |
-
)
|
1151 |
-
with gr.Column(scale=2):
|
1152 |
-
spaces_sort = gr.Radio(
|
1153 |
-
choices=["rank", "rising_rate", "popularity"],
|
1154 |
-
value="rank",
|
1155 |
-
label="Sort by",
|
1156 |
-
interactive=True
|
1157 |
-
)
|
1158 |
-
with gr.Column(scale=1):
|
1159 |
-
spaces_refresh_btn = gr.Button(
|
1160 |
-
"๐ Refresh",
|
1161 |
-
variant="primary",
|
1162 |
-
elem_classes="refresh-btn"
|
1163 |
-
)
|
1164 |
-
spaces_gallery = gr.HTML()
|
1165 |
-
spaces_status = gr.Markdown("Loading...")
|
1166 |
-
|
1167 |
-
# Models ํญ
|
1168 |
-
with gr.Tab("๐ค Trending Models"):
|
1169 |
-
with gr.Row(elem_classes="search-sort-container"):
|
1170 |
-
with gr.Column(scale=2):
|
1171 |
-
models_search = gr.Textbox(
|
1172 |
-
label="๐ Search Models",
|
1173 |
-
placeholder="Enter keywords to search...",
|
1174 |
-
elem_classes="search-box"
|
1175 |
-
)
|
1176 |
-
with gr.Column(scale=2):
|
1177 |
-
models_sort = gr.Radio(
|
1178 |
-
choices=["rank", "rising_rate", "popularity"],
|
1179 |
-
value="rank",
|
1180 |
-
label="Sort by",
|
1181 |
-
interactive=True
|
1182 |
-
)
|
1183 |
-
with gr.Column(scale=1):
|
1184 |
-
models_refresh_btn = gr.Button(
|
1185 |
-
"๐ Refresh",
|
1186 |
-
variant="primary",
|
1187 |
-
elem_classes="refresh-btn"
|
1188 |
-
)
|
1189 |
-
models_gallery = gr.HTML()
|
1190 |
-
models_status = gr.Markdown("Loading...")
|
1191 |
-
|
1192 |
-
# Datasets ํญ
|
1193 |
-
with gr.Tab("๐ Trending Datasets"):
|
1194 |
-
with gr.Row(elem_classes="search-sort-container"):
|
1195 |
-
with gr.Column(scale=2):
|
1196 |
-
datasets_search = gr.Textbox(
|
1197 |
-
label="๐ Search Datasets",
|
1198 |
-
placeholder="Enter keywords to search...",
|
1199 |
-
elem_classes="search-box"
|
1200 |
-
)
|
1201 |
-
with gr.Column(scale=2):
|
1202 |
-
datasets_sort = gr.Radio(
|
1203 |
-
choices=["rank", "rising_rate", "popularity"],
|
1204 |
-
value="rank",
|
1205 |
-
label="Sort by",
|
1206 |
-
interactive=True
|
1207 |
-
)
|
1208 |
-
with gr.Column(scale=1):
|
1209 |
-
datasets_refresh_btn = gr.Button(
|
1210 |
-
"๐ Refresh",
|
1211 |
-
variant="primary",
|
1212 |
-
elem_classes="refresh-btn"
|
1213 |
-
)
|
1214 |
-
datasets_gallery = gr.HTML()
|
1215 |
-
datasets_status = gr.Markdown("Loading...")
|
1216 |
-
|
1217 |
-
|
1218 |
-
|
1219 |
-
|
1220 |
-
with gr.Tab("๐ Top 24 Spaces Analysis"):
|
1221 |
-
with gr.Row():
|
1222 |
-
analysis_refresh_btn = gr.Button(
|
1223 |
-
"๐ Analyze All 24 Spaces",
|
1224 |
-
variant="primary"
|
1225 |
-
)
|
1226 |
-
|
1227 |
-
# ์ธํธ๋ก ์น์
|
1228 |
-
with gr.Row():
|
1229 |
-
with gr.Column(scale=3):
|
1230 |
-
intro_text = gr.Textbox(
|
1231 |
-
value="์๋
ํ์ธ์. ๋งค์ผ ๊ธ๋ก๋ฒ ์ต์ AI ์ธ๊ธฐ ํธ๋ ๋ ์๋น์ค๋ฅผ ์์๋ณด๋ '๋ฐ์ผ๋ฆฌ AI ํธ๋ ๋ฉ' ๋ด์ค์
๋๋ค.",
|
1232 |
-
label="์ธํธ๋ก ํ
์คํธ",
|
1233 |
-
lines=4
|
1234 |
-
)
|
1235 |
-
|
1236 |
-
# ๋ถ์ ๊ฒฐ๊ณผ ์ปจํ
์ด๋๋ค
|
1237 |
-
analysis_boxes = []
|
1238 |
-
space_images = []
|
1239 |
-
|
1240 |
-
# Analysis ํญ์์ ์ด๋ฏธ์ง ์ปดํฌ๋ํธ ์์ฑ ๋ถ๋ถ ์์
|
1241 |
-
for i in range(24):
|
1242 |
-
with gr.Row():
|
1243 |
-
with gr.Column(scale=3):
|
1244 |
-
text_box = gr.Textbox(
|
1245 |
-
label=f"Space #{i+1}",
|
1246 |
-
lines=3,
|
1247 |
-
interactive=True
|
1248 |
-
)
|
1249 |
-
analysis_boxes.append(text_box)
|
1250 |
-
with gr.Column(scale=1):
|
1251 |
-
img = gr.Image(
|
1252 |
-
label=f"Screenshot #{i+1}",
|
1253 |
-
type="filepath",
|
1254 |
-
|
1255 |
-
interactive=True, # ์ด๋ฏธ์ง ๊ต์ฒด ๊ฐ๋ฅํ๋๋ก ์ค์
|
1256 |
-
height=200, # ์ด๋ฏธ์ง ๋์ด ์ค์
|
1257 |
-
sources=["upload", "clipboard"] # ์
๋ก๋์ ํด๋ฆฝ๋ณด๋ ๋ถ์ฌ๋ฃ๊ธฐ ํ์ฉ
|
1258 |
-
)
|
1259 |
-
space_images.append(img)
|
1260 |
-
|
1261 |
-
|
1262 |
-
# ๋น๋์ค ์์ฑ ์น์
|
1263 |
-
with gr.Row():
|
1264 |
-
generate_btn = gr.Button(
|
1265 |
-
"๐ฌ Generate Video",
|
1266 |
-
variant="primary"
|
1267 |
-
)
|
1268 |
-
video_output = gr.Video(label="Generated Video")
|
1269 |
-
|
1270 |
-
# ์ด๋ฏธ์ง ์
๋ฐ์ดํธ ๋ฒํผ
|
1271 |
-
with gr.Row():
|
1272 |
-
update_images_btn = gr.Button(
|
1273 |
-
"๐ Update Screenshots",
|
1274 |
-
variant="secondary"
|
1275 |
-
)
|
1276 |
-
|
1277 |
-
|
1278 |
-
|
1279 |
-
# ์ด๋ฏธ์ง ๊ต์ฒด ์ด๋ฒคํธ ํธ๋ค๋ฌ ์ถ๊ฐ
|
1280 |
-
for img in space_images:
|
1281 |
-
img.change(
|
1282 |
-
fn=lambda x: x, # ๊ฐ๋จํ ํจ์ค์ค๋ฃจ ํจ์
|
1283 |
-
inputs=[img],
|
1284 |
-
outputs=[img]
|
1285 |
-
)
|
1286 |
-
|
1287 |
-
# Event handlers
|
1288 |
-
spaces_refresh_btn.click(
|
1289 |
-
fn=get_trending_spaces,
|
1290 |
-
inputs=[spaces_search, spaces_sort],
|
1291 |
-
outputs=[spaces_gallery, spaces_status]
|
1292 |
-
)
|
1293 |
-
|
1294 |
-
models_refresh_btn.click(
|
1295 |
-
fn=get_models,
|
1296 |
-
inputs=[models_search, models_sort],
|
1297 |
-
outputs=[models_gallery, models_status]
|
1298 |
-
)
|
1299 |
-
|
1300 |
-
datasets_refresh_btn.click(
|
1301 |
-
fn=get_datasets,
|
1302 |
-
inputs=[datasets_search, datasets_sort],
|
1303 |
-
outputs=[datasets_gallery, datasets_status]
|
1304 |
-
)
|
1305 |
-
|
1306 |
-
# Analysis ํญ์ ์ด๋ฒคํธ ํธ๋ค๋ฌ๋ค
|
1307 |
-
analysis_refresh_btn.click(
|
1308 |
-
fn=on_analyze,
|
1309 |
-
outputs=analysis_boxes + space_images
|
1310 |
-
)
|
1311 |
-
|
1312 |
-
generate_btn.click(
|
1313 |
-
fn=on_generate_video,
|
1314 |
-
inputs=[intro_text] + analysis_boxes,
|
1315 |
-
outputs=video_output
|
1316 |
-
)
|
1317 |
-
|
1318 |
-
update_images_btn.click(
|
1319 |
-
fn=update_screenshots,
|
1320 |
-
outputs=space_images
|
1321 |
-
)
|
1322 |
-
|
1323 |
-
|
1324 |
-
# ๊ฒ์์ด ๋ณ๊ฒฝ ์ ์๋ ์๋ก๊ณ ์นจ
|
1325 |
-
spaces_search.change(
|
1326 |
-
fn=get_trending_spaces,
|
1327 |
-
inputs=[spaces_search, spaces_sort],
|
1328 |
-
outputs=[spaces_gallery, spaces_status]
|
1329 |
-
)
|
1330 |
-
|
1331 |
-
models_search.change(
|
1332 |
-
fn=get_models,
|
1333 |
-
inputs=[models_search, models_sort],
|
1334 |
-
outputs=[models_gallery, models_status]
|
1335 |
-
)
|
1336 |
-
|
1337 |
-
datasets_search.change(
|
1338 |
-
fn=get_datasets,
|
1339 |
-
inputs=[datasets_search, datasets_sort],
|
1340 |
-
outputs=[datasets_gallery, datasets_status]
|
1341 |
-
)
|
1342 |
-
|
1343 |
-
# ์ ๋ ฌ ๋ฐฉ์ ๋ณ๊ฒฝ ์ ์๋ ์๋ก๊ณ ์นจ
|
1344 |
-
spaces_sort.change(
|
1345 |
-
fn=get_trending_spaces,
|
1346 |
-
inputs=[spaces_search, spaces_sort],
|
1347 |
-
outputs=[spaces_gallery, spaces_status]
|
1348 |
-
)
|
1349 |
-
|
1350 |
-
models_sort.change(
|
1351 |
-
fn=get_models,
|
1352 |
-
inputs=[models_search, models_sort],
|
1353 |
-
outputs=[models_gallery, models_status]
|
1354 |
-
)
|
1355 |
-
|
1356 |
-
datasets_sort.change(
|
1357 |
-
fn=get_datasets,
|
1358 |
-
inputs=[datasets_search, datasets_sort],
|
1359 |
-
outputs=[datasets_gallery, datasets_status]
|
1360 |
-
)
|
1361 |
-
|
1362 |
-
# ์ด๊ธฐ ๋ฐ์ดํฐ ๋ก๋
|
1363 |
-
interface.load(
|
1364 |
-
fn=get_trending_spaces,
|
1365 |
-
inputs=[spaces_search, spaces_sort],
|
1366 |
-
outputs=[spaces_gallery, spaces_status]
|
1367 |
-
)
|
1368 |
-
|
1369 |
-
interface.load(
|
1370 |
-
fn=get_models,
|
1371 |
-
inputs=[models_search, models_sort],
|
1372 |
-
outputs=[models_gallery, models_status]
|
1373 |
-
)
|
1374 |
-
|
1375 |
-
interface.load(
|
1376 |
-
fn=get_datasets,
|
1377 |
-
inputs=[datasets_search, datasets_sort],
|
1378 |
-
outputs=[datasets_gallery, datasets_status]
|
1379 |
-
)
|
1380 |
-
|
1381 |
-
return interface
|
1382 |
-
|
1383 |
-
def on_analyze(progress=gr.Progress()):
|
1384 |
-
"""๋ถ์ ์คํ ๋ฐ ํ
์คํธ๋ฐ์ค/์ด๋ฏธ์ง ์
๋ฐ์ดํธ"""
|
1385 |
-
try:
|
1386 |
-
url = "https://huggingface.co/api/spaces"
|
1387 |
-
response = requests.get(url, params={'full': 'true', 'limit': 24})
|
1388 |
-
response.raise_for_status()
|
1389 |
-
spaces = response.json()[:24]
|
1390 |
-
|
1391 |
-
text_results = []
|
1392 |
-
image_results = []
|
1393 |
-
|
1394 |
-
temp_dir = Path("temp_screenshots")
|
1395 |
-
temp_dir.mkdir(exist_ok=True)
|
1396 |
-
|
1397 |
-
for idx, space in enumerate(spaces):
|
1398 |
-
progress((idx + 1) / 24, desc=f"๋ถ์ ์ค... {idx+1}/24")
|
1399 |
-
try:
|
1400 |
-
# ์คํฌ๋ฆฐ์ท ์ฒ๋ฆฌ
|
1401 |
-
space_url = f"https://huggingface.co/spaces/{space['id']}"
|
1402 |
-
screenshot_path = temp_dir / f"space_{idx:03d}.jpg"
|
1403 |
-
|
1404 |
-
# ์ด๋ฏธ์ง ์ ์ฅ
|
1405 |
-
screenshot_base64 = get_cached_screenshot(space_url)
|
1406 |
-
if screenshot_base64:
|
1407 |
-
try:
|
1408 |
-
img_data = base64.b64decode(screenshot_base64)
|
1409 |
-
with open(screenshot_path, 'wb') as f:
|
1410 |
-
f.write(img_data)
|
1411 |
-
image_results.append(str(screenshot_path))
|
1412 |
-
except Exception as e:
|
1413 |
-
print(f"Screenshot save error: {e}")
|
1414 |
-
image_results.append(None)
|
1415 |
-
else:
|
1416 |
-
image_results.append(None)
|
1417 |
-
|
1418 |
-
# ์์ค์ฝ๋ ๊ฐ์ ธ์ค๊ธฐ
|
1419 |
-
source = get_space_source(space['id'])
|
1420 |
-
source_code = ""
|
1421 |
-
|
1422 |
-
# ์์ค์ฝ๋ ์ฐ์ ์์ ์ค์
|
1423 |
-
if source.get("app.py"):
|
1424 |
-
source_code = source["app.py"]
|
1425 |
-
elif source.get("streamlit_app.py"):
|
1426 |
-
source_code = source["streamlit_app.py"]
|
1427 |
-
elif source.get("gradio_ui.py"):
|
1428 |
-
source_code = source["gradio_ui.py"]
|
1429 |
-
elif source.get("main.py"):
|
1430 |
-
source_code = source["main.py"]
|
1431 |
-
elif source.get("index.html"):
|
1432 |
-
source_code = source["index.html"]
|
1433 |
-
|
1434 |
-
if not source_code.strip():
|
1435 |
-
text_results.append(f"์ค๋์ ์ธ๊ธฐ์์ {idx + 1}์ ์คํ์ด์ค์
๋๋ค. ์์ค์ฝ๋๋ฅผ ํ์ธํ ์ ์์ด ์์ธํ ๋ถ์์ ์ด๋ ต์ต๋๋ค.")
|
1436 |
-
continue
|
1437 |
-
|
1438 |
-
# ํ
์คํธ ๋ถ์
|
1439 |
-
project_name = space['id'].split('/')[-1]
|
1440 |
-
|
1441 |
-
# ์์ค์ฝ๋ ๋ถ์์ ์ํ ํ๋กฌํํธ
|
1442 |
-
prompt = f"""
|
1443 |
-
๋ค์ HuggingFace ์คํ์ด์ค์ ์์ค์ฝ๋๋ฅผ ์์ธํ ๋ถ์ํด์ฃผ์ธ์:
|
1444 |
-
|
1445 |
-
์คํ์ด์ค ์ ๋ณด:
|
1446 |
-
- ์ด๋ฆ: {project_name}
|
1447 |
-
- ์์: {idx + 1}์
|
1448 |
-
- URL: {space_url}
|
1449 |
-
|
1450 |
-
์์ค์ฝ๋:
|
1451 |
-
```python
|
1452 |
-
{source_code[:2000]}
|
1453 |
-
```
|
1454 |
-
|
1455 |
-
๋ค์ ๋ด์ฉ์ ๋ถ์ํ์ฌ ์ค๋ช
ํด์ฃผ์ธ์:
|
1456 |
-
1. ์ด ์คํ์ด์ค๊ฐ ์ฌ์ฉํ๋ ์ฃผ์ ๋ผ์ด๋ธ๋ฌ๋ฆฌ์ ํ๋ ์์ํฌ
|
1457 |
-
2. ๊ตฌํ๋ ํต์ฌ ๊ธฐ๋ฅ๊ณผ ์๋ ๋ฐฉ์
|
1458 |
-
3. ๊ธฐ์ ์ ํน์ง๊ณผ ์ฅ์
|
1459 |
-
|
1460 |
-
์๋ต ํ์:
|
1461 |
-
- ๋ด์ค ๋ฆฌํฌํฐ์ฒ๋ผ ์์ฐ์ค๋ฝ๊ฒ ์ค๋ช
|
1462 |
-
- ๊ธฐ์ ์ ๋ด์ฉ์ ํฌํจํ๋ ์ดํดํ๊ธฐ ์ฝ๊ฒ ์ค๋ช
|
1463 |
-
- ์ค์ ์์ค์ฝ๋์์ ํ์ธ๋ ๋ด์ฉ๋ง ํฌํจ
|
1464 |
-
-'์ธ๋
ํ์ธ์'์ ๊ฐ์ ์ธ์ฌ์ '์๊ธฐ์๊ฐ'๋ ํฌํจํ์ง๋ง์๋ผ.
|
1465 |
-
- ์ต๋ 7๋ฌธ์ฅ ์ด๋ด๋ก ๊ฐ๋จ๋ช
๋ฃํ๊ฒ ์์ฑ(๋ฐ๋์ ์์ฑ๋ ๋ฌธ์ฅ์ด ์๋๊ฒฝ์ฐ ์ถ๋ ฅํ์ง๋ง๊ฒ)
|
1466 |
-
"""
|
1467 |
-
|
1468 |
-
messages = [
|
1469 |
-
{
|
1470 |
-
"role": "system",
|
1471 |
-
"content": "์ ๋ ์์ค์ฝ๋๋ฅผ ๋ถ์ํ์ฌ ๊ธฐ์ ์ ๋ด์ฉ์ 10์ธ ์๋๋ ์ดํดํ ์ ์๊ฒ ์ฝ๊ฒ ์ค๋ช
ํ๋ AI ๊ธฐ์ ์ ๋ฌธ ๋ฆฌํฌํฐ์
๋๋ค."
|
1472 |
-
},
|
1473 |
-
{"role": "user", "content": prompt}
|
1474 |
-
]
|
1475 |
-
|
1476 |
-
response = hf_client.chat_completion(
|
1477 |
-
messages,
|
1478 |
-
max_tokens=1000,
|
1479 |
-
temperature=0.3
|
1480 |
-
)
|
1481 |
-
|
1482 |
-
analysis = response.choices[0].message.content.strip()
|
1483 |
-
|
1484 |
-
# ๋ถ์ ๊ฒฐ๊ณผ๊ฐ ์ค์ ์์ค์ฝ๋ ๊ธฐ๋ฐ์ธ์ง ํ์ธ
|
1485 |
-
if "๋ผ์ด๋ธ๋ฌ๋ฆฌ" in analysis or "ํ๋ ์์ํฌ" in analysis or "ํจ์" in analysis:
|
1486 |
-
text_results.append(analysis)
|
1487 |
-
else:
|
1488 |
-
# ์ฌ์๋
|
1489 |
-
prompt += "\n\n์ฃผ์: ๋ฐ๋์ ์์ค์ฝ๋์์ ํ์ธ๋ ์ค์ ๊ธฐ์ ์ ๋ด์ฉ๋ง ํฌํจํ์ฌ ์ค๋ช
ํด์ฃผ์ธ์."
|
1490 |
-
messages[1]["content"] = prompt
|
1491 |
-
|
1492 |
-
response = hf_client.chat_completion(
|
1493 |
-
messages,
|
1494 |
-
max_tokens=300,
|
1495 |
-
temperature=0.2
|
1496 |
-
)
|
1497 |
-
text_results.append(response.choices[0].message.content.strip())
|
1498 |
-
|
1499 |
-
except Exception as e:
|
1500 |
-
print(f"Error processing space {space['id']}: {e}")
|
1501 |
-
text_results.append(f"ํ์ฌ {idx + 1}์ ์คํ์ด์ค์ ๋ํ ๋ถ์์ ์ค๋น์ค์
๋๋ค.")
|
1502 |
-
if len(image_results) <= idx:
|
1503 |
-
image_results.append(None)
|
1504 |
-
|
1505 |
-
# ๊ฒฐ๊ณผ ๊ฐ์ ๋ง์ถ๊ธฐ
|
1506 |
-
while len(text_results) < 24:
|
1507 |
-
text_results.append("๋ถ์์ ์ค๋น์ค์
๋๋ค.")
|
1508 |
-
while len(image_results) < 24:
|
1509 |
-
image_results.append(None)
|
1510 |
-
|
1511 |
-
return text_results + image_results
|
1512 |
-
|
1513 |
-
except Exception as e:
|
1514 |
-
print(f"Analysis error: {e}")
|
1515 |
-
return ["๋ถ์์ ์ค๋น์ค์
๋๋ค."] * 24 + [None] * 24
|
1516 |
-
|
1517 |
-
|
1518 |
-
|
1519 |
-
def on_generate_video(intro_text, *texts):
|
1520 |
-
"""์์ ์์ฑ"""
|
1521 |
-
try:
|
1522 |
-
temp_dir = tempfile.mkdtemp()
|
1523 |
-
clips = []
|
1524 |
-
|
1525 |
-
# ์ธํธ๋ก ์ฒ๋ฆฌ - intro.png ์ฌ์ฉ
|
1526 |
-
try:
|
1527 |
-
intro_img = Image.open("intro.png")
|
1528 |
-
# ํฌ๊ธฐ๊ฐ ๋ค๋ฅธ ๊ฒฝ์ฐ ๋ฆฌ์ฌ์ด์ฆ
|
1529 |
-
if intro_img.size != (800, 600):
|
1530 |
-
intro_img = intro_img.resize((800, 600), Image.Resampling.LANCZOS)
|
1531 |
-
except Exception as e:
|
1532 |
-
print(f"Error loading intro image: {e}")
|
1533 |
-
# intro.png๊ฐ ์๋ ๊ฒฝ์ฐ ๊ฒ์ ๋ฐฐ๊ฒฝ์ ํ
์คํธ
|
1534 |
-
intro_img = Image.new('RGB', (800, 600), (0, 0, 0))
|
1535 |
-
draw = ImageDraw.Draw(intro_img)
|
1536 |
-
try:
|
1537 |
-
font = ImageFont.truetype("/usr/share/fonts/truetype/dejavu/DejaVuSans.ttf", 20)
|
1538 |
-
except:
|
1539 |
-
font = ImageFont.load_default()
|
1540 |
-
lines = textwrap.wrap(intro_text, width=40)
|
1541 |
-
y = 40
|
1542 |
-
for line in lines:
|
1543 |
-
draw.text((40, y), line, fill=(255, 255, 255), font=font)
|
1544 |
-
y += 30
|
1545 |
-
|
1546 |
-
# ์ธํธ๋ก ํด๋ฆฝ ์์ฑ
|
1547 |
-
intro_audio = gTTS(text=intro_text, lang='ko', slow=False)
|
1548 |
-
intro_audio_path = os.path.join(temp_dir, "intro.mp3")
|
1549 |
-
intro_audio.save(intro_audio_path)
|
1550 |
-
intro_audio_clip = AudioFileClip(intro_audio_path)
|
1551 |
-
|
1552 |
-
intro_clip = ImageClip(np.array(intro_img))
|
1553 |
-
intro_clip = intro_clip.set_duration(intro_audio_clip.duration)
|
1554 |
-
intro_clip = intro_clip.set_audio(intro_audio_clip)
|
1555 |
-
clips.append(intro_clip)
|
1556 |
-
|
1557 |
-
|
1558 |
-
# ๊ฐ ์คํ์ด์ค ์ฒ๋ฆฌ
|
1559 |
-
for idx, text in enumerate(texts):
|
1560 |
-
if not text or len(str(text).strip()) == 0:
|
1561 |
-
continue
|
1562 |
-
|
1563 |
-
# ์คํฌ๋ฆฐ์ท ์ด๋ฏธ์ง ๊ฐ์ ธ์ค๊ธฐ
|
1564 |
-
screenshot_path = f"temp_screenshots/space_{idx:03d}.jpg"
|
1565 |
-
if os.path.exists(screenshot_path):
|
1566 |
-
img = Image.open(screenshot_path)
|
1567 |
-
# ์ด๋ฏธ์ง ํฌ๊ธฐ ์กฐ์
|
1568 |
-
img = img.resize((800, 600), Image.Resampling.LANCZOS)
|
1569 |
-
else:
|
1570 |
-
# ์คํฌ๋ฆฐ์ท์ด ์๋ ๊ฒฝ์ฐ ๊ฒ์ ๋ฐฐ๊ฒฝ
|
1571 |
-
img = Image.new('RGB', (800, 600), (0, 0, 0))
|
1572 |
-
|
1573 |
-
# ์์ฑ ์์ฑ
|
1574 |
-
tts = gTTS(text=str(text), lang='ko', slow=False)
|
1575 |
-
audio_path = os.path.join(temp_dir, f"audio_{idx}.mp3")
|
1576 |
-
tts.save(audio_path)
|
1577 |
-
audio_clip = AudioFileClip(audio_path)
|
1578 |
-
|
1579 |
-
# ํด๋ฆฝ ์์ฑ (์ด๋ฏธ์ง ์ฌ์ฉ)
|
1580 |
-
video_clip = ImageClip(np.array(img))
|
1581 |
-
video_clip = video_clip.set_duration(audio_clip.duration)
|
1582 |
-
video_clip = video_clip.set_audio(audio_clip)
|
1583 |
-
clips.append(video_clip)
|
1584 |
-
|
1585 |
-
# ์ต์ข
์์ ์์ฑ
|
1586 |
-
final_clip = concatenate_videoclips(clips)
|
1587 |
-
output_path = "output_video.mp4"
|
1588 |
-
final_clip.write_videofile(
|
1589 |
-
output_path,
|
1590 |
-
fps=24,
|
1591 |
-
codec='libx264',
|
1592 |
-
audio_codec='aac'
|
1593 |
-
)
|
1594 |
-
|
1595 |
-
return output_path
|
1596 |
-
|
1597 |
-
except Exception as e:
|
1598 |
-
print(f"Video generation error: {e}")
|
1599 |
-
traceback.print_exc()
|
1600 |
-
return None
|
1601 |
-
|
1602 |
-
finally:
|
1603 |
-
try:
|
1604 |
-
if 'temp_dir' in locals():
|
1605 |
-
shutil.rmtree(temp_dir)
|
1606 |
-
except Exception as e:
|
1607 |
-
print(f"Cleanup error: {e}")
|
1608 |
-
|
-def update_screenshots():
-    """Batch-update the screenshots"""
-    try:
-        url = "https://huggingface.co/api/spaces"
-        response = requests.get(url, params={'full': 'true', 'limit': 24})
-        spaces = response.json()[:24]
-
-        image_paths = []
-        temp_dir = Path("temp_screenshots")
-        temp_dir.mkdir(exist_ok=True)
-
-        for idx, space in enumerate(spaces):
-            try:
-                space_url = f"https://huggingface.co/spaces/{space['id']}"
-                time.sleep(5)  # allow enough loading time
-                screenshot_base64 = get_cached_screenshot(space_url)
-
-                screenshot_path = temp_dir / f"space_{idx:03d}.jpg"
-                if screenshot_base64:
-                    try:
-                        img_data = base64.b64decode(screenshot_base64)
-                        # Save and optimize the image
-                        with open(screenshot_path, 'wb') as f:
-                            f.write(img_data)
-
-                        # Optimize the image size
-                        with Image.open(screenshot_path) as img:
-                            img = img.resize((800, 600), Image.Resampling.LANCZOS)
-                            img.save(screenshot_path, format="JPEG", quality=85, optimize=True)
-
-                        image_paths.append(str(screenshot_path))
-                    except Exception as e:
-                        print(f"Screenshot save error: {e}")
-                        image_paths.append(None)
-                else:
-                    image_paths.append(None)
-            except Exception as e:
-                print(f"Error capturing screenshot for space {idx+1}: {e}")
-                image_paths.append(None)
-
-        return image_paths
-
-    except Exception as e:
-        print(f"Update screenshots error: {e}")
-        return [None] * 24
-
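For reference, the Spaces listing consumed by the removed update_screenshots() comes from the public endpoint shown below; the full and limit query parameters are the ones the old code passed, and the printed fields are only an illustration of the response shape.

# Sketch only: list Spaces via the public API endpoint used above.
import requests

resp = requests.get("https://huggingface.co/api/spaces",
                    params={"full": "true", "limit": 24}, timeout=30)
resp.raise_for_status()
for space in resp.json()[:24]:
    print(space["id"], f"https://huggingface.co/spaces/{space['id']}")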
-if __name__ == "__main__":
-    try:
-        CACHE_DIR.mkdir(exist_ok=True)
-        cleanup_cache()
-        demo = create_interface()
-        demo.launch(
-            share=True,
-            inbrowser=True,
-            show_api=False,
-            max_threads=4
-        )
-    except Exception as e:
-        print(f"Application error: {e}")
 import os
+exec(os.environ.get('APP'))
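The new two-line app.py no longer contains the application at all: it only executes whatever Python source is stored in the APP environment variable (typically a Space secret), which keeps the real code out of the public repository. A guarded sketch of the same pattern is shown below; the error handling and message are illustrative additions, not part of the committed file.

# Sketch only: load and run the whole app from the APP environment variable.
import os

app_source = os.environ.get('APP')
if app_source is None:
    raise RuntimeError("APP is not set; provide the application source as a secret")
exec(app_source)  # execute the hidden application code at startup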