# Reference call from this Space's gradio_client API documentation, kept here
# for comparison with the interactive app below:
#
# import streamlit as st
# from gradio_client import Client
#
# client = Client("awacke1/Arxiv-Paper-Search-And-QA-RAG-Pattern")
# result = client.predict(
#     "What is Semantic and Episodic memory?",  # str in 'Search' Textbox component
#     4,  # float (numeric value between 4 and 10) in 'Top n results as context' Slider component
#     "Semantic Search - up to 10 Mar 2024",  # Literal['Semantic Search - up to 10 Mar 2024', 'Arxiv Search - Latest - (EXPERIMENTAL)'] in 'Search Source' Dropdown component
#     "mistralai/Mixtral-8x7B-Instruct-v0.1",  # Literal['mistralai/Mixtral-8x7B-Instruct-v0.1', 'mistralai/Mistral-7B-Instruct-v0.2', 'google/gemma-7b-it', 'None'] in 'LLM Model' Dropdown component
#     api_name="/update_with_rag_md"
# )
# st.markdown(result)


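# To run this app (file name assumed to be app.py; dependencies inferred from
# the imports below):
#   pip install streamlit gradio_client
#   streamlit run app.py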
import streamlit as st
import os
from datetime import datetime
from gradio_client import Client

def save_file(content, file_type):
    """Write content to a timestamped Markdown file and return the file name."""
    timestamp = datetime.now().strftime("%Y%m%d_%H%M%S")
    file_name = f"{file_type}_{timestamp}.md"
    # utf-8 avoids platform-dependent encoding errors on Markdown content.
    with open(file_name, "w", encoding="utf-8") as file:
        file.write(content)
    return file_name

def load_file(file_name):
    """Read and return the contents of a saved Markdown file."""
    with open(file_name, "r", encoding="utf-8") as file:
        content = file.read()
    return content
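
# A minimal companion sketch (not part of the original app): parse_file_name is
# a hypothetical helper that recovers the memory-flag type and timestamp from a
# name produced by save_file() above, assuming its f"{file_type}_{timestamp}.md"
# convention. It could be used, for example, to group the sidebar's saved files
# by flag type.
def parse_file_name(file_name):
    """Split '<flag>_<YYYYmmdd_HHMMSS>.md' into ('<flag>', '<YYYYmmdd_HHMMSS>')."""
    stem = file_name[:-3] if file_name.endswith(".md") else file_name
    flag, _, timestamp = stem.partition("_")
    return flag, timestamp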

def main():
    st.set_page_config(page_title="Memory Flag System")
    st.title("Memory Flag System")

    # Cache the remote client so it is built once, not on every script rerun
    # (st.cache_resource requires Streamlit >= 1.18).
    @st.cache_resource
    def get_client():
        return Client("awacke1/Arxiv-Paper-Search-And-QA-RAG-Pattern")

    client = get_client()

    search_query = st.text_input("Search")
    # The remote endpoint documents this value as a float between 4 and 10
    # (see the reference call above), so the slider is capped at 10.
    top_n_results = st.slider("Top n results as context", min_value=4, max_value=10, value=10)
    search_source = st.selectbox("Search Source", ["Semantic Search - up to 10 Mar 2024", "Arxiv Search - Latest - (EXPERIMENTAL)"])
    llm_model = st.selectbox("LLM Model", ["mistralai/Mixtral-8x7B-Instruct-v0.1", "mistralai/Mistral-7B-Instruct-v0.2", "google/gemma-7b-it", "None"])

    if st.button("Search"):
        # Keep the result in session state: every button click triggers a full
        # script rerun, so a local variable would be lost before the "Save"
        # button below could use it.
        st.session_state["result"] = client.predict(
            search_query,
            top_n_results,
            search_source,
            llm_model,
            api_name="/update_with_rag_md"
        )

    if "result" in st.session_state:
        st.markdown(st.session_state["result"])

        file_type = st.radio("Select Memory Flag", ("Semantic", "Episodic"))
        if st.button("Save"):
            file_name = save_file(st.session_state["result"], file_type)
            st.success(f"File saved: {file_name}")

    saved_files = sorted(f for f in os.listdir(".") if f.endswith(".md"))
    selected_file = st.sidebar.selectbox("Saved Files", saved_files)

    if selected_file:
        file_content = load_file(selected_file)
        st.sidebar.markdown(file_content)

        # A checkbox (rather than a button) keeps the edit form visible across
        # the rerun triggered by the save button inside it.
        if st.sidebar.checkbox("πŸ“ Edit"):
            edited_content = st.text_area("Edit File", value=file_content, height=400)
            new_file_name = st.text_input("File Name", value=selected_file)
            if st.button("πŸ’Ύ Save"):
                with open(new_file_name, "w", encoding="utf-8") as file:
                    file.write(edited_content)
                st.success(f"File updated: {new_file_name}")

        if st.sidebar.button("πŸ—‘οΈ Delete"):
            os.remove(selected_file)
            st.warning(f"File deleted: {selected_file}")
            # Rerun so the sidebar list no longer shows the deleted file
            # (st.rerun requires Streamlit >= 1.27).
            st.rerun()

if __name__ == "__main__":
    main()