Update app.py
app.py CHANGED
@@ -15,36 +15,20 @@ from urllib.parse import quote
import streamlit as st
import streamlit.components.v1 as components

-# For demonstration, from huggingface_hub
from huggingface_hub import InferenceClient

-# -----------------------------------------------------
-# Ensure default MarkdownCode.md & MermaidCode.md exist
-# -----------------------------------------------------
-if not os.path.exists("MarkdownCode.md"):
-    with open("MarkdownCode.md", 'w', encoding='utf-8') as f:
-        f.write("# Default Markdown\nThis is a default Markdown file.")
-    st.rerun()
-
-if not os.path.exists("MermaidCode.md"):
-    with open("MermaidCode.md", 'w', encoding='utf-8') as f:
-        # IMPORTANT: Each click line now has a 2nd string for tooltip
-        f.write("""flowchart LR
-            %% Minimal example with correct 'click' syntax
-            %% - "Tooltip text" between the URL and the target
-            A[Default] --> B[Example]
-            click A "/?q=Default" "Open Default" "_self"
-            click B "/?q=Example" "Open Example" "_self"
-        """)
-    st.rerun()
-
# ----------------------------
# Placeholder data structures
# ----------------------------
PromptPrefix = "AI-Search: "
PromptPrefix2 = "AI-Refine: "
PromptPrefix3 = "AI-JS: "

roleplaying_glossary = {
    "Core Rulebooks": {
        "Dungeons and Dragons": ["Player's Handbook", "Dungeon Master's Guide", "Monster Manual"],
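Note on the block removed above: it bootstrapped MarkdownCode.md and MermaidCode.md with defaults and forced a st.rerun() after each write. A minimal self-contained sketch of that bootstrap (the helper name is ours, not from the commit); the click lines follow the four-part syntax the removed comment points out, click NodeID "URL" "Tooltip" "_self":

import os

def ensure_default_file(path: str, default_text: str) -> None:
    # Create the file only when it is missing; the removed block also
    # called st.rerun() right after writing.
    if not os.path.exists(path):
        with open(path, "w", encoding="utf-8") as f:
            f.write(default_text)

ensure_default_file("MarkdownCode.md", "# Default Markdown\nThis is a default Markdown file.")
ensure_default_file(
    "MermaidCode.md",
    'flowchart LR\n'
    '    A[Default] --> B[Example]\n'
    '    click A "/?q=Default" "Open Default" "_self"\n'
    '    click B "/?q=Example" "Open Example" "_self"\n',
)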
@@ -55,14 +39,13 @@ roleplaying_glossary = {
    }
}

transhuman_glossary = {
    "Neural Interfaces": ["Cortex Jack", "Mind-Machine Fusion"],
    "Cybernetics": ["Robotic Limbs", "Augmented Eyes"],
}

-#
-# Stub Methods
-# ------------
def process_text(text):
    st.write(f"process_text called with: {text}")

@@ -84,6 +67,7 @@ def process_video(video_file, seconds_per_frame):
def search_glossary(content):
    st.write(f"search_glossary called with: {content}")

    API_URL = "https://huggingface-inference-endpoint-placeholder"
    API_KEY = "hf_XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX"
@@ -97,6 +81,10 @@ def InferenceLLM(prompt):
# --------------------------------------
@st.cache_resource
def display_glossary_entity(k):
    search_urls = {
        "๐๐ArXiv": lambda k: f"/?q={quote(k)}",
        "๐Analyst": lambda k: f"/?q={quote(k)}-{quote(PromptPrefix)}",

@@ -114,6 +102,9 @@ def display_glossary_entity(k):

@st.cache_resource
def display_glossary_grid(roleplaying_glossary):
    search_urls = {
        "๐๐ArXiv": lambda k: f"/?q={quote(k)}",
        "๐Analyst": lambda k: f"/?q={quote(k)}-{quote(PromptPrefix)}",
@@ -164,6 +155,9 @@ def get_zip_download_link(zip_file):
    return href

def get_table_download_link(file_path):
    try:
        with open(file_path, 'r', encoding='utf-8') as file:
            data = file.read()

@@ -189,6 +183,9 @@ def get_file_size(file_path):
    return os.path.getsize(file_path)

def compare_and_delete_files(files):
    if not files:
        st.warning("No files to compare.")
        return
@@ -196,6 +193,7 @@ def compare_and_delete_files(files):
    for file in files:
        size = os.path.getsize(file)
        file_sizes.setdefault(size, []).append(file)
    for size, paths in file_sizes.items():
        if len(paths) > 1:
            latest_file = max(paths, key=os.path.getmtime)
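The hunk above groups files by byte size and, when several files share a size, keeps only the newest one by modification time. A self-contained sketch of that duplicate detection (function name is ours):

import os

def find_duplicate_candidates(files):
    # Group by size; same-size files are treated as duplicates and
    # everything but the most recently modified copy is returned.
    file_sizes = {}
    for file in files:
        file_sizes.setdefault(os.path.getsize(file), []).append(file)
    duplicates = []
    for size, paths in file_sizes.items():
        if len(paths) > 1:
            latest_file = max(paths, key=os.path.getmtime)
            duplicates.extend(p for p in paths if p != latest_file)
    return duplicates

Matching on size alone can flag unrelated files that happen to be the same length; hashing file contents would be a stricter test.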
@@ -207,16 +205,14 @@ def compare_and_delete_files(files):

def FileSidebar():
    """
-    Renders the file sidebar with open/view/run/delete logic.
-    Excludes README.md from the list.
    """
    all_files = glob.glob("*.md")
-    #
-    all_files = [f for f in all_files if f != 'README.md']
-    # Filter out short-named files if desired
    all_files = [file for file in all_files if len(os.path.splitext(file)[0]) >= 5]
    all_files.sort(key=lambda x: (os.path.splitext(x)[1], x), reverse=True)

    Files1, Files2 = st.sidebar.columns(2)
    with Files1:
        if st.button("๐ Delete All"):

@@ -232,9 +228,11 @@ def FileSidebar():
    file_name = ''
    next_action = ''

    for file in all_files:
        col1, col2, col3, col4, col5 = st.sidebar.columns([1,6,1,1,1])
        with col1:
            if st.button("๐", key="md_"+file):
                file_contents = load_file(file)
                file_name = file
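For reference, the old side's file listing boils down to: take every *.md file, drop README.md and short names, then sort by extension and name in reverse. A tiny demonstration of the ordering that sort key produces:

import os

files = ["alpha.md", "notes1.md", "zebra.md"]
files = [f for f in files if f != "README.md" and len(os.path.splitext(f)[0]) >= 5]
files.sort(key=lambda x: (os.path.splitext(x)[1], x), reverse=True)
# files == ['zebra.md', 'notes1.md', 'alpha.md']  (same extension, so effectively reverse-alphabetical)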
@@ -260,8 +258,12 @@ def FileSidebar():
        with col5:
            if st.button("๐", key="delete_"+file):
                os.remove(file)
                st.rerun()

    file_sizes = [get_file_size(file) for file in all_files]
    previous_size = None
    st.sidebar.title("File Operations")

@@ -281,19 +283,22 @@ def FileSidebar():
            st.rerun()
        previous_size = size

    if len(file_contents) > 0:
        if next_action == 'open':
            open1, open2 = st.columns([0.8, 0.2])
            with open1:
-                file_name_input = st.text_input('File Name:', file_name, key='file_name_input')
                file_content_area = st.text_area('File Contents:', file_contents, height=300, key='file_content_area')

                if st.button('๐พ Save File'):
                    with open(file_name_input, 'w', encoding='utf-8') as f:
                        f.write(file_content_area)
                    st.markdown(f'Saved {file_name_input} successfully.')

        elif next_action == 'search':
            file_content_area = st.text_area("File Contents:", file_contents, height=500)
            user_prompt = PromptPrefix2 + file_contents
            st.markdown(user_prompt)
@@ -306,7 +311,6 @@ def FileSidebar():
        if st.button('๐Run'):
            st.write("Running GPT logic placeholder...")

-
# ---------------------------
# Basic Scoring / Glossaries
# ---------------------------

@@ -338,6 +342,9 @@ def load_score(key):
    return 0

def display_buttons_with_scores(num_columns_text):
    game_emojis = {
        "Dungeons and Dragons": "๐",
        "Call of Cthulhu": "๐",

@@ -373,16 +380,19 @@ def display_buttons_with_scores(num_columns_text):
                newscore = update_score(key.replace('?',''))
                st.markdown(f"Scored **{category} - {game} - {term}** -> {newscore}")

-
# --------------------
# Image & Video Grids
# --------------------
def display_images_and_wikipedia_summaries(num_columns=4):
    image_files = [f for f in os.listdir('.') if f.endswith('.png')]
    if not image_files:
        st.write("No PNG images found in the current directory.")
        return

    image_files_sorted = sorted(image_files, key=lambda x: len(x.split('.')[0]))
    cols = st.columns(num_columns)
    col_index = 0
@@ -394,6 +404,7 @@ def display_images_and_wikipedia_summaries(num_columns=4):
            st.image(image, use_column_width=True)
            k = image_file.split('.')[0]
            display_glossary_entity(k)
            image_text_input = st.text_input(f"Prompt for {image_file}", key=f"image_prompt_{image_file}")
            if len(image_text_input) > 0:
                response = process_image(image_file, image_text_input)

@@ -403,6 +414,9 @@ def display_images_and_wikipedia_summaries(num_columns=4):
        col_index += 1

def display_videos_and_links(num_columns=4):
    video_files = [f for f in os.listdir('.') if f.endswith(('.mp4', '.webm'))]
    if not video_files:
        st.write("No MP4 or WEBM videos found in the current directory.")

@@ -417,9 +431,11 @@ def display_videos_and_links(num_columns=4):
            k = video_file.split('.')[0]
            st.video(video_file, format='video/mp4', start_time=0)
            display_glossary_entity(k)
            video_text_input = st.text_input(f"Video Prompt for {video_file}", key=f"video_prompt_{video_file}")
            if video_text_input:
                try:
                    seconds_per_frame = 10
                    process_video(video_file, seconds_per_frame)
                except ValueError:
@@ -428,15 +444,19 @@ def display_videos_and_links(num_columns=4):


# -------------------------------------
-# Query Param Helpers
# -------------------------------------
def get_all_query_params(key):
-    return st.query_params.get(key, [])

def clear_query_params():
-    st.query_params

def display_content_or_image(query):
    for category, term_list in transhuman_glossary.items():
        for term in term_list:
            if query.lower() in term.lower():

@@ -452,9 +472,13 @@ def display_content_or_image(query):


# ------------------------------------
-# MERMAID DIAGRAM with
# ------------------------------------
def generate_mermaid_html(mermaid_code: str) -> str:
    return f"""
    <html>
    <head>
@@ -466,6 +490,7 @@ def generate_mermaid_html(mermaid_code: str) -> str:
        margin: 20px auto;
    }}
    .mermaid {{
        max-width: 800px;
    }}
    </style>
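The diff only shows the CSS portion of generate_mermaid_html. For orientation, a hedged sketch of a typical implementation; the CDN URL and initialization call are assumptions, not taken from this commit:

def generate_mermaid_html_sketch(mermaid_code: str) -> str:
    # Loads Mermaid from a CDN and auto-renders any <div class="mermaid"> block.
    return f"""
    <html>
    <head>
    <script type="module">
        import mermaid from "https://cdn.jsdelivr.net/npm/mermaid@10/dist/mermaid.esm.min.mjs";
        mermaid.initialize({{ startOnLoad: true }});
    </script>
    </head>
    <body>
    <div class="mermaid">
    {mermaid_code}
    </div>
    </body>
    </html>
    """

The returned string is what gets passed to components.html(...) in main(), just like the real function's output.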
@@ -482,90 +507,105 @@ def generate_mermaid_html(mermaid_code: str) -> str:
    """

def append_model_param(url: str, model_selected: bool) -> str:
    if not model_selected:
        return url
    delimiter = "&" if "?" in url else "?"
    return f"{url}{delimiter}model=1"


def main():
    st.set_page_config(page_title="Mermaid + Clickable Links Demo", layout="wide")

-    # 1) Parse query strings
-    query_params = st.
-    current_q = query_params.get("q", [""])[0]
    current_r = query_params.get("r", [""])[0]

    st.sidebar.write("## Diagram Link Settings")
    model_selected = st.sidebar.checkbox("Append ?model=1 to each link?")

-    #
-
-
-
-    # Rebuild for clickable diagram with optional &model=1
-    base_diagram = mermaid_default or ""
    lines = base_diagram.strip().split("\n")
    new_lines = []
    for line in lines:
        if "click " in line and '"/?' in line:
-            #
-
-
-            if len(parts) ==
                url = parts[1]
-                tooltip = parts[2]
-                target = parts[3]
                updated_url = append_model_param(url, model_selected)
-                #
-                new_line = f"{parts[0]}
-
-            # We can do a capturing group for the node ID in a separate pattern or handle manually.
-            # For simplicity, let's just rewrite if the user used consistent 'click NodeID "URL" "Tooltip" "Target"'
-            # We'll do a second approach:
-
-            # We'll do a direct approach: parse manually with a simpler pattern
-            # Instead, let's keep it simpler for demonstration:
-            # If your usage is consistent, you can skip all this complexity and just do a standard line replacement.
-
-            new_lines.append("click fix: " + line)  # placeholder, see below
        else:
-
-            # We'll do the simpler approach: just find the quoted URL & update it
-            short_line = re.split(r'click\s+(\S+)\s+"([^"]+)"\s+"([^"]+)"\s+"([^"]+)"', line)
-            # If it doesn't match, we just leave it as is or attempt a simpler replace:
-            # We'll attempt a simpler approach with a single replace if the user always uses the second quote for the URL
-            # ...
-            updated_line = line
-            # We look for the second quoted substring if possible
-            # This can get complicated quickly, so let's keep it minimal:
-            updated_line = re.sub(r'click\s+(\S+)\s+"([^"]+)"\s+"([^"]+)"\s+"([^"]+)"',
-                lambda m: f'click {m.group(1)} "{append_model_param(m.group(2), model_selected)}" "{m.group(3)}" "{m.group(4)}"',
-                line)
-            new_lines.append(updated_line)
        else:
            new_lines.append(line)
    mermaid_code = "\n".join(new_lines)

    st.title("Top-Centered Mermaid Diagram with Clickable Links ๐บ")
    diagram_html = generate_mermaid_html(mermaid_code)
    components.html(diagram_html, height=400, scrolling=True)

-    # Show inbound ?q
    if current_q:
        st.markdown(f"**Detected Query**: `?q={current_q}`")
        display_content_or_image(current_q)
    if current_r:
        st.markdown(f"**Detected Relationship**: `?r={current_r}`")

    left_col, right_col = st.columns(2)

    # --- Left: Markdown Editor
    with left_col:
        st.subheader("Markdown Side ๐")
        if "markdown_text" not in st.session_state:
-            st.session_state["markdown_text"] =
-
        markdown_text = st.text_area(
            "Edit Markdown:",
            value=st.session_state["markdown_text"],
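The re.sub fallback removed in this hunk handles the four-part click syntax (click NodeID "URL" "Tooltip" "_self") in one pass. Run against a line from the old default diagram, with model_selected fixed to True, it behaves like this:

import re

def append_model_param(url: str, model_selected: bool) -> str:
    if not model_selected:
        return url
    delimiter = "&" if "?" in url else "?"
    return f"{url}{delimiter}model=1"

line = 'click A "/?q=Default" "Open Default" "_self"'
rewritten = re.sub(
    r'click\s+(\S+)\s+"([^"]+)"\s+"([^"]+)"\s+"([^"]+)"',
    lambda m: f'click {m.group(1)} "{append_model_param(m.group(2), True)}" "{m.group(3)}" "{m.group(4)}"',
    line,
)
# rewritten == 'click A "/?q=Default&model=1" "Open Default" "_self"'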
@@ -573,26 +613,17 @@ def main():
        )
        st.session_state["markdown_text"] = markdown_text

-
        with colA:
-            if st.button("๐ Refresh"):
                st.write("**Markdown** content refreshed! ๐ฟ")
        with colB:
-            if st.button("โ Clear"):
                st.session_state["markdown_text"] = ""
-                st.
-        with colC:
-            if st.button("๐พ File Save"):
-                with open("MarkdownCode.md", 'w', encoding='utf-8') as f:
-                    f.write(markdown_text)
-                st.success("Saved to MarkdownCode.md")
-        with colD:
-            md_filename = st.text_input("Filename for Markdown:", value="MarkdownCode.md", key="md_filename_key")
-            if st.button("๐พ Save As"):
-                with open(md_filename, 'w', encoding='utf-8') as f:
-                    f.write(markdown_text)
-                st.success(f"Saved to {md_filename}")

        st.markdown("---")
        st.markdown("**Preview:**")
        st.markdown(markdown_text)
@@ -600,41 +631,32 @@ def main():
    # --- Right: Mermaid Editor
    with right_col:
        st.subheader("Mermaid Side ๐งโโ๏ธ")
        if "current_mermaid" not in st.session_state:
-            st.session_state["current_mermaid"] =

        mermaid_input = st.text_area(
            "Edit Mermaid Code:",
            value=st.session_state["current_mermaid"],
            height=300
        )
-
-        colC, colD, colE, colF = st.columns(4)
        with colC:
-            if st.button("๐จ Refresh"):
                st.session_state["current_mermaid"] = mermaid_input
                st.write("**Mermaid** diagram refreshed! ๐")
-                st.
        with colD:
-            if st.button("โ Clear "):
                st.session_state["current_mermaid"] = ""
-                st.
-        with colE:
-            if st.button("๐พ File Save "):
-                with open("MermaidCode.md", 'w', encoding='utf-8') as f:
-                    f.write(mermaid_input)
-                st.success("Saved to MermaidCode.md")
-        with colF:
-            mermaid_filename = st.text_input("Filename for Mermaid:", value="MermaidCode.md", key="mermaid_filename_key")
-            if st.button("๐พ Save As "):
-                with open(mermaid_filename, 'w', encoding='utf-8') as f:
-                    f.write(mermaid_input)
-                st.success(f"Saved to {mermaid_filename}")

        st.markdown("---")
        st.markdown("**Mermaid Source:**")
        st.code(mermaid_input, language="python", line_numbers=True)

        st.markdown("---")
        st.header("Media Galleries")
@@ -644,6 +666,7 @@ def main():
    num_columns_video = st.slider("Choose Number of Video Columns", 1, 15, 5, key="num_columns_video")
    display_videos_and_links(num_columns_video)

    showExtendedTextInterface = False
    if showExtendedTextInterface:
        display_glossary_grid(roleplaying_glossary)

@@ -651,9 +674,10 @@ def main():
        display_buttons_with_scores(num_columns_text)
        st.markdown("Extended text interface is on...")

    FileSidebar()

-    # Random Title at bottom
    titles = [
        "๐ง ๐ญ Semantic Symphonies & Episodic Encores",
        "๐๐ผ AI Rhythms of Memory Lane",

@@ -667,13 +691,6 @@ def main():
    selected_title = random.choice(titles)
    st.markdown(f"**{selected_title}**")

-def new_lines_joiner(prefix_str):
-    """
-    A placeholder function if you needed to parse out the node name from prefix.
-    If not used, you can remove it. This is just a stub to illustrate
-    how you might handle the 'click NodeID "URL" "Tooltip" "Target"' pattern.
-    """
-    return prefix_str  # or parse out the node name

if __name__ == "__main__":
    main()
import streamlit as st
import streamlit.components.v1 as components

+# For demonstration, we'll import from huggingface_hub
+# (You can omit if you're not using HF or adapt your own client)
from huggingface_hub import InferenceClient

# ----------------------------
# Placeholder data structures
# ----------------------------
+
+# Example placeholders for prompt prefixes
PromptPrefix = "AI-Search: "
PromptPrefix2 = "AI-Refine: "
PromptPrefix3 = "AI-JS: "

+# Minimal example of a roleplaying glossary
roleplaying_glossary = {
    "Core Rulebooks": {
        "Dungeons and Dragons": ["Player's Handbook", "Dungeon Master's Guide", "Monster Manual"],

    }
}

+# Minimal example of a transhuman glossary
transhuman_glossary = {
    "Neural Interfaces": ["Cortex Jack", "Mind-Machine Fusion"],
    "Cybernetics": ["Robotic Limbs", "Augmented Eyes"],
}

+# Just to demonstrate how your "search_arxiv" or "SpeechSynthesis" etc. might be placeholders
def process_text(text):
    st.write(f"process_text called with: {text}")

def search_glossary(content):
    st.write(f"search_glossary called with: {content}")

+    # If you have HF Inference endpoint, set them here, else placeholders
    API_URL = "https://huggingface-inference-endpoint-placeholder"
    API_KEY = "hf_XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX"

# --------------------------------------
@st.cache_resource
def display_glossary_entity(k):
+    """
+    Example of how you'd create multiple links for a glossary entity.
+    This was in your original snippet. We'll keep it short.
+    """
    search_urls = {
        "๐๐ArXiv": lambda k: f"/?q={quote(k)}",
        "๐Analyst": lambda k: f"/?q={quote(k)}-{quote(PromptPrefix)}",

@st.cache_resource
def display_glossary_grid(roleplaying_glossary):
+    """
+    Displays a glossary in columns with multiple link emojis.
+    """
    search_urls = {
        "๐๐ArXiv": lambda k: f"/?q={quote(k)}",
        "๐Analyst": lambda k: f"/?q={quote(k)}-{quote(PromptPrefix)}",

    return href

def get_table_download_link(file_path):
+    """
+    Creates a download link for a single file from your snippet.
+    """
    try:
        with open(file_path, 'r', encoding='utf-8') as file:
            data = file.read()

    return os.path.getsize(file_path)

def compare_and_delete_files(files):
+    """
+    Compare file sizes. If duplicates exist, keep only the latest.
+    """
    if not files:
        st.warning("No files to compare.")
        return

    for file in files:
        size = os.path.getsize(file)
        file_sizes.setdefault(size, []).append(file)
+    # Remove all but the latest file for each size
    for size, paths in file_sizes.items():
        if len(paths) > 1:
            latest_file = max(paths, key=os.path.getmtime)

def FileSidebar():
    """
+    Renders the file sidebar with all the open/view/run/delete logic.
    """
    all_files = glob.glob("*.md")
+    # Example logic filtering filenames
    all_files = [file for file in all_files if len(os.path.splitext(file)[0]) >= 5]
    all_files.sort(key=lambda x: (os.path.splitext(x)[1], x), reverse=True)

+    # Buttons for "Delete All" and "Download"
    Files1, Files2 = st.sidebar.columns(2)
    with Files1:
        if st.button("๐ Delete All"):

    file_name = ''
    next_action = ''

+    # Each file row
    for file in all_files:
        col1, col2, col3, col4, col5 = st.sidebar.columns([1,6,1,1,1])
        with col1:
+            # Show an emoji button to do "md"
            if st.button("๐", key="md_"+file):
                file_contents = load_file(file)
                file_name = file

        with col5:
            if st.button("๐", key="delete_"+file):
                os.remove(file)
+                file_name = file
                st.rerun()
+                next_action = 'delete'
+                st.session_state['next_action'] = next_action

+    # Duplicate detection
    file_sizes = [get_file_size(file) for file in all_files]
    previous_size = None
    st.sidebar.title("File Operations")

            st.rerun()
        previous_size = size

+    # If we have loaded something
    if len(file_contents) > 0:
        if next_action == 'open':
            open1, open2 = st.columns([0.8, 0.2])
            with open1:
+                file_name_input = st.text_input('File Name:', file_name, key='file_name_input', on_change=None)
                file_content_area = st.text_area('File Contents:', file_contents, height=300, key='file_content_area')

+                # Minimal "Save" stubs
                if st.button('๐พ Save File'):
                    with open(file_name_input, 'w', encoding='utf-8') as f:
                        f.write(file_content_area)
                    st.markdown(f'Saved {file_name_input} successfully.')

        elif next_action == 'search':
+            # Example usage
            file_content_area = st.text_area("File Contents:", file_contents, height=500)
            user_prompt = PromptPrefix2 + file_contents
            st.markdown(user_prompt)

        if st.button('๐Run'):
            st.write("Running GPT logic placeholder...")

# ---------------------------
# Basic Scoring / Glossaries
# ---------------------------

    return 0

def display_buttons_with_scores(num_columns_text):
+    """
+    Show buttons that track a 'score' from your glossary data.
+    """
    game_emojis = {
        "Dungeons and Dragons": "๐",
        "Call of Cthulhu": "๐",

                newscore = update_score(key.replace('?',''))
                st.markdown(f"Scored **{category} - {game} - {term}** -> {newscore}")

# --------------------
# Image & Video Grids
# --------------------
def display_images_and_wikipedia_summaries(num_columns=4):
+    """
+    Display all .png images in the current directory in a grid, referencing the name as a 'keyword'.
+    """
    image_files = [f for f in os.listdir('.') if f.endswith('.png')]
    if not image_files:
        st.write("No PNG images found in the current directory.")
        return

+    # Sort by length of filename, just as an example
    image_files_sorted = sorted(image_files, key=lambda x: len(x.split('.')[0]))
    cols = st.columns(num_columns)
    col_index = 0

            st.image(image, use_column_width=True)
            k = image_file.split('.')[0]
            display_glossary_entity(k)
+            # Provide a text input for user interactions
            image_text_input = st.text_input(f"Prompt for {image_file}", key=f"image_prompt_{image_file}")
            if len(image_text_input) > 0:
                response = process_image(image_file, image_text_input)

        col_index += 1

def display_videos_and_links(num_columns=4):
+    """
+    Displays all .mp4 or .webm videos found in the current directory in a grid.
+    """
    video_files = [f for f in os.listdir('.') if f.endswith(('.mp4', '.webm'))]
    if not video_files:
        st.write("No MP4 or WEBM videos found in the current directory.")

            k = video_file.split('.')[0]
            st.video(video_file, format='video/mp4', start_time=0)
            display_glossary_entity(k)
+            # Provide a text input
            video_text_input = st.text_input(f"Video Prompt for {video_file}", key=f"video_prompt_{video_file}")
            if video_text_input:
                try:
+                    # Hard-coded example
                    seconds_per_frame = 10
                    process_video(video_file, seconds_per_frame)
                except ValueError:


# -------------------------------------
+# Query Param Helpers from your snippet
# -------------------------------------
def get_all_query_params(key):
+    return st.query_params().get(key, [])

def clear_query_params():
+    st.query_params()

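A note on the two helpers above: get_all_query_params() assumes the dict-of-lists shape that st.experimental_get_query_params() returns (and which main() uses below), but in newer Streamlit releases st.query_params is a dict-like property rather than a callable, so calling st.query_params() as written is unlikely to work. A hedged sketch of the property-style equivalents, assuming a Streamlit version that exposes st.query_params:

import streamlit as st

def get_query_param(key: str) -> str:
    # st.query_params behaves like a dict of single string values.
    return st.query_params.get(key, "")

def clear_query_params_sketch() -> None:
    st.query_params.clear()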
def display_content_or_image(query):
+    """
+    If a query matches something in transhuman_glossary or
+    a local image, show it. Otherwise warn no match.
+    """
    for category, term_list in transhuman_glossary.items():
        for term in term_list:
            if query.lower() in term.lower():

# ------------------------------------
+# MERMAID DIAGRAM with Clickable Links
# ------------------------------------
def generate_mermaid_html(mermaid_code: str) -> str:
+    """
+    Returns HTML embedding a Mermaid diagram. We embed the code
+    in <div class="mermaid"> and center it with CSS.
+    """
    return f"""
    <html>
    <head>

        margin: 20px auto;
    }}
    .mermaid {{
+        /* Let the diagram scale or otherwise style as you wish */
        max-width: 800px;
    }}
    </style>

    """

def append_model_param(url: str, model_selected: bool) -> str:
+    """
+    If 'Model' checkbox is selected, we append '&model=1' or '?model=1' to the URL.
+    We'll handle whether the URL already has a '?' or not.
+    """
    if not model_selected:
        return url
    delimiter = "&" if "?" in url else "?"
    return f"{url}{delimiter}model=1"


+# For demonstration, we add clickable nodes & edges:
+#   click <nodeId> "<URL>" "_self"
+# If you want edges to be clickable, you can label them as well,
+# but Mermaid typically only has a 'click' property for nodes.
+DEFAULT_MERMAID = """
+flowchart LR
+    %% Notice we have "click LLM ..." lines:
+    U((User ๐)) -- "Talk ๐ฃ๏ธ" --> LLM[LLM Agent ๐ค\\nExtract Info]
+    click U "/?q=User%20๐" _self
+    click LLM "/?q=LLM%20Agent%20Extract%20Info" _self
+
+    LLM -- "Query ๐" --> HS[Hybrid Search ๐\\nVector+NER+Lexical]
+    click HS "/?q=Hybrid%20Search%20Vector+NER+Lexical" _self
+
+    HS -- "Reason ๐ค" --> RE[Reasoning Engine ๐ ๏ธ\\nNeuralNetwork+Medical]
+    click RE "/?q=Reasoning%20Engine%20NeuralNetwork+Medical" _self
+
+    RE -- "Link ๐ก" --> KG((Knowledge Graph ๐\\nOntology+GAR+RAG))
+    click KG "/?q=Knowledge%20Graph%20Ontology+GAR+RAG" _self
+
+    %% If you want an "edge click" to pass ?r= something,
+    %% Mermaid doesn't have direct 'click' for edges,
+    %% but you can define them as nodes or use linkStyle trick, etc.
+"""
+
+
+# ---------------------------
+# Streamlit Main App
+# ---------------------------
def main():
    st.set_page_config(page_title="Mermaid + Clickable Links Demo", layout="wide")

+    # 1) Parse query strings on page load
+    query_params = st.experimental_get_query_params()
+    current_q = query_params.get("q", [""])[0]  # If present, first string
    current_r = query_params.get("r", [""])[0]

+    # 2) Let user pick if they want to add the "model=1" param to clickable links
    st.sidebar.write("## Diagram Link Settings")
    model_selected = st.sidebar.checkbox("Append ?model=1 to each link?")

+    # 3) Generate a dynamic Mermaid code, appending model param if user wants
+    #    We'll do a simple string replace to incorporate the model param
+    #    For a robust approach, parse each URL carefully, then reassemble.
+    base_diagram = DEFAULT_MERMAID
    lines = base_diagram.strip().split("\n")
    new_lines = []
    for line in lines:
        if "click " in line and '"/?' in line:
+            # e.g. click LLM "/?q=LLM%20Agent" _self
+            # let's isolate the URL part
+            parts = re.split(r'click\s+\S+\s+"([^"]+)"\s+("_self")', line)
+            if len(parts) == 4:
+                # parts[0] = 'click LLM '
+                # parts[1] = '/?q=LLM%20Agent%20Extract%20Info'
+                # parts[2] = ' _self'
+                # parts[3] = '' (trailing possibly)
                url = parts[1]
                updated_url = append_model_param(url, model_selected)
+                # Recombine
+                new_line = f"{parts[0]}\"{updated_url}\" {parts[2]}"
+                new_lines.append(new_line)
            else:
+                new_lines.append(line)
        else:
            new_lines.append(line)
    mermaid_code = "\n".join(new_lines)
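One thing to watch in the loop above: the split pattern requires a quoted "_self" target, while every click line in DEFAULT_MERMAID ends in a bare _self, so len(parts) == 4 appears never to hold and the URLs pass through unchanged. A more permissive rewrite (hypothetical helper, not part of the commit) that accepts either form of the target:

import re

def add_model_param_to_click(line: str, model_selected: bool) -> str:
    # Matches click <node> "<url>" followed by _self or "_self";
    # append_model_param comes from earlier in this file.
    pattern = r'(click\s+\S+\s+")([^"]+)("\s+"?_self"?)'
    repl = lambda m: f"{m.group(1)}{append_model_param(m.group(2), model_selected)}{m.group(3)}"
    return re.sub(pattern, repl, line)

# add_model_param_to_click('    click U "/?q=User" _self', True)
# -> '    click U "/?q=User&model=1" _self'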

+    # 4) Render the top-centered Mermaid diagram
    st.title("Top-Centered Mermaid Diagram with Clickable Links ๐บ")
    diagram_html = generate_mermaid_html(mermaid_code)
    components.html(diagram_html, height=400, scrolling=True)

+    # 5) Show what the inbound ?q / ?r was
    if current_q:
        st.markdown(f"**Detected Query**: `?q={current_q}`")
        display_content_or_image(current_q)
    if current_r:
        st.markdown(f"**Detected Relationship**: `?r={current_r}`")

+    # 6) Editor Columns: Markdown & Mermaid
    left_col, right_col = st.columns(2)

    # --- Left: Markdown Editor
    with left_col:
        st.subheader("Markdown Side ๐")
        if "markdown_text" not in st.session_state:
+            st.session_state["markdown_text"] = "## Hello!\nType some *Markdown* here.\n"
+        # Text area
        markdown_text = st.text_area(
            "Edit Markdown:",
            value=st.session_state["markdown_text"],

        )
        st.session_state["markdown_text"] = markdown_text

+        # Button row
+        colA, colB = st.columns(2)
        with colA:
+            if st.button("๐ Refresh Markdown"):
                st.write("**Markdown** content refreshed! ๐ฟ")
        with colB:
+            if st.button("โ Clear Markdown"):
                st.session_state["markdown_text"] = ""
+                st.experimental_rerun()
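Minor consistency note: these editor buttons call st.experimental_rerun(), while the sidebar code in the same file uses st.rerun(); newer Streamlit releases deprecate the experimental name in favor of st.rerun(). A sketch of the same button with the non-experimental call, assumed equivalent here:

if st.button("โ Clear Markdown"):
    st.session_state["markdown_text"] = ""
    st.rerun()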

+        # Display
        st.markdown("---")
        st.markdown("**Preview:**")
        st.markdown(markdown_text)

    # --- Right: Mermaid Editor
    with right_col:
        st.subheader("Mermaid Side ๐งโโ๏ธ")
+
        if "current_mermaid" not in st.session_state:
+            st.session_state["current_mermaid"] = mermaid_code

        mermaid_input = st.text_area(
            "Edit Mermaid Code:",
            value=st.session_state["current_mermaid"],
            height=300
        )
+        colC, colD = st.columns(2)
        with colC:
+            if st.button("๐จ Refresh Diagram"):
+                # Rebuild the diagram
                st.session_state["current_mermaid"] = mermaid_input
                st.write("**Mermaid** diagram refreshed! ๐")
+                st.experimental_rerun()
        with colD:
+            if st.button("โ Clear Mermaid"):
                st.session_state["current_mermaid"] = ""
+                st.experimental_rerun()

        st.markdown("---")
        st.markdown("**Mermaid Source:**")
        st.code(mermaid_input, language="python", line_numbers=True)

+    # 7) Show Sliders & image/video galleries
    st.markdown("---")
    st.header("Media Galleries")

    num_columns_video = st.slider("Choose Number of Video Columns", 1, 15, 5, key="num_columns_video")
    display_videos_and_links(num_columns_video)

+    # 8) Optional "Extended" UI
    showExtendedTextInterface = False
    if showExtendedTextInterface:
        display_glossary_grid(roleplaying_glossary)

        display_buttons_with_scores(num_columns_text)
        st.markdown("Extended text interface is on...")

+    # 9) Render the file sidebar
    FileSidebar()

+    # 10) Random Title at bottom
    titles = [
        "๐ง ๐ญ Semantic Symphonies & Episodic Encores",
        "๐๐ผ AI Rhythms of Memory Lane",

    selected_title = random.choice(titles)
    st.markdown(f"**{selected_title}**")


if __name__ == "__main__":
    main()