# -*- coding: utf-8 -*-

import gradio as gr
from huggingface_hub import InferenceClient
import os

# Set up the Hugging Face inference API client
hf_client = InferenceClient("CohereForAI/c4ai-command-r-plus-08-2024", token=os.getenv("HF_TOKEN"))
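# The client reads a Hugging Face access token from the HF_TOKEN environment
# variable; the token must be authorized to call the hosted model.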

def respond(
    message,
    history: list[tuple[str, str]],
    system_message="",  
    max_tokens=7860,  
    temperature=0.8, 
    top_p=0.9,  
):
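    """Stream a chat completion for `message`.

    Replays `history` as alternating user/assistant turns, prefixes the
    system prompt, and yields the accumulated response text as tokens
    arrive from the model.
    """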
    system_prefix = """
    [System prompt content...]
    """

    messages = [{"role": "system", "content": f"{system_prefix} {system_message}"}]
    # Replay prior turns so the model sees the full conversation context.
    for user_msg, assistant_msg in history:
        if user_msg:
            messages.append({"role": "user", "content": user_msg})
        if assistant_msg:
            messages.append({"role": "assistant", "content": assistant_msg})
    messages.append({"role": "user", "content": message})

    response = ""
    try:
        # Stream the completion chunk by chunk and yield the growing text.
        for chunk in hf_client.chat_completion(
            messages,
            max_tokens=max_tokens,
            stream=True,
            temperature=temperature,
            top_p=top_p,
        ):
            token = chunk.choices[0].delta.content
            if token is not None:
                response += token
            yield response
    except Exception as e:
        yield f"Error: {str(e)}"
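
# A minimal sanity check for respond() outside of Gradio (a sketch, assuming
# HF_TOKEN is set and the model endpoint is reachable):
#
#     for partial in respond("Outline a fantasy novel", history=[]):
#         print(partial)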

# Set up the Gradio interface
interface = gr.ChatInterface(
    respond,    
    additional_inputs=[
        gr.Textbox(label="System Message", value="Write(output) in ํ•œ๊ตญ์–ด."),
        gr.Slider(minimum=1, maximum=8000, value=7000, label="Max Tokens"),
        gr.Slider(minimum=0, maximum=1, value=0.7, label="Temperature"),
        gr.Slider(minimum=0, maximum=1, value=0.9, label="Top P"),
    ],
    examples=[
        ["ํŒํƒ€์ง€ ์†Œ์„ค์˜ ํฅ๋ฏธ๋กœ์šด ์†Œ์žฌ 10๊ฐ€์ง€๋ฅผ ์ œ์‹œํ•˜๋ผ"],
        ["๊ณ„์† ์ด์–ด์„œ ์ž‘์„ฑํ•˜๋ผ"],
        ["Translate into English"],
        ["๋งˆ๋ฒ• ์‹œ์Šคํ…œ์— ๋Œ€ํ•ด ๋” ์ž์„ธํžˆ ์„ค๋ช…ํ•˜๋ผ"],
        ["์ „ํˆฌ ์žฅ๋ฉด์„ ๋” ๊ทน์ ์œผ๋กœ ๋ฌ˜์‚ฌํ•˜๋ผ"],
        ["์ƒˆ๋กœ์šด ํŒํƒ€์ง€ ์ข…์กฑ์„ ์ถ”๊ฐ€ํ•˜๋ผ"],
        ["๊ณ ๋Œ€ ์˜ˆ์–ธ์— ๋Œ€ํ•ด ๋” ์ž์„ธํžˆ ์„ค๋ช…ํ•˜๋ผ"],
        ["์ฃผ์ธ๊ณต์˜ ๋‚ด๋ฉด ๋ฌ˜์‚ฌ๋ฅผ ์ถ”๊ฐ€ํ•˜๋ผ"],
    ],
    title="Fantasy Novel AI Generation",  
    cache_examples=False,
    theme="Yntec/HaleyCH_Theme_Orange"
)
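
# NOTE: on some older Gradio versions, streaming generator outputs require the
# request queue; if streaming stalls, calling interface.queue() before launch
# may help (an assumption about the installed Gradio version).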

# Run the application
if __name__ == "__main__":
    interface.launch(
        server_name="0.0.0.0",  # listen on all network interfaces
        server_port=7860,       # serve on port 7860
        share=True              # create a public Gradio share link
    )