|
import streamlit as st |
|
import json |
|
import pandas as pd |
|
import os |
|
|
|
# Page chrome: wide layout suits the tabular data editor below.
# NOTE(review): the title emoji appears mojibake-damaged ("π" is the first
# byte of a UTF-8 emoji decoded with the wrong codec) — confirm intended glyph.
st.set_page_config(page_title="Dataset Builder and Editor", layout="wide")

st.title("π JSONL Dataset Builder and Editor")

# Scratch directory holding the working copy of the dataset (JSONL) and its
# field metadata (JSON) so edits survive Streamlit reruns within a session.
TMP_DIR = "temp"

TMP_FILE = os.path.join(TMP_DIR, "session_dataset.jsonl")

META_FILE = os.path.join(TMP_DIR, "metadata.json")


# Create the scratch directory up front; exist_ok makes reruns a no-op.
os.makedirs(TMP_DIR, exist_ok=True)
|
|
|
|
|
|
|
def get_all_fields(data):
    """Return the sorted union of keys found across all record dicts."""
    keys = set()
    for rec in data:
        keys |= rec.keys()
    return sorted(keys)
|
|
|
|
|
# Full reset: wipe widget/session state and delete both scratch files, then
# rerun so the app starts from a blank slate.
if st.button("π Reset Session"):
    st.session_state.clear()
    for stale_path in (TMP_FILE, META_FILE):
        if os.path.exists(stale_path):
            os.remove(stale_path)
    st.success("π§Ή Session has been reset. Starting fresh!")
    st.rerun()
|
|
|
|
|
# First run of a session: restore the working dataset and field metadata from
# the scratch files, or start empty when they do not exist.
if "data" not in st.session_state:
    if os.path.exists(TMP_FILE):
        with open(TMP_FILE, "r", encoding="utf-8") as f:
            # Skip blank lines — a trailing newline (or any empty line) in
            # the file would otherwise crash json.loads("").
            st.session_state.data = [
                json.loads(line) for line in f if line.strip()
            ]
    else:
        st.session_state.data = []

    if os.path.exists(META_FILE):
        with open(META_FILE, "r", encoding="utf-8") as f:
            metadata = json.load(f)
        fields = metadata.get("fields", [])
        # Metadata is the source of truth for field order and input type.
        st.session_state.all_fields = [f["name"] for f in fields]
        st.session_state.field_types = {
            f["name"]: f.get("type", "text") for f in fields
        }
    else:
        # No saved metadata: derive the field list from the data itself and
        # default every field to a single-line text input.
        st.session_state.all_fields = get_all_fields(st.session_state.data)
        st.session_state.field_types = {
            field: "text" for field in st.session_state.all_fields
        }
|
|
|
|
|
uploaded_file = st.file_uploader("Upload a JSONL file", type=["jsonl"])

if uploaded_file:
    # Parse the upload, ignoring blank lines — an interior empty line would
    # otherwise crash json.loads("") even after the outer strip().
    content = uploaded_file.read().decode("utf-8")
    st.session_state.data = [
        json.loads(line) for line in content.splitlines() if line.strip()
    ]

    st.session_state.all_fields = get_all_fields(st.session_state.data)

    if "field_types" in st.session_state:
        # Keep the user's existing type choices; only fill in types for
        # fields first seen in this upload.
        # NOTE(review): new fields here default to "textarea" while every
        # other code path defaults to "text" — confirm this is intentional.
        for field in st.session_state.all_fields:
            st.session_state.field_types.setdefault(field, "textarea")
    else:
        st.session_state.field_types = {
            field: "text" for field in st.session_state.all_fields
        }

    # Persist the working copy so the upload survives Streamlit reruns.
    with open(TMP_FILE, "w", encoding="utf-8") as f:
        for item in st.session_state.data:
            f.write(json.dumps(item, ensure_ascii=False) + "\n")

    # Persist the field schema (name + input type) alongside the data.
    metadata = {
        "fields": [
            {"name": field, "type": st.session_state.field_types.get(field, "text")}
            for field in st.session_state.all_fields
        ]
    }
    with open(META_FILE, "w", encoding="utf-8") as f:
        json.dump(metadata, f, indent=2, ensure_ascii=False)

    st.success(
        f"Loaded {len(st.session_state.data)} records with fields: {st.session_state.all_fields}"
    )
|
|
|
|
|
# Guarantee the field list exists even for a completely empty session.
if not (st.session_state.data or st.session_state.all_fields):
    st.session_state.all_fields = []
|
|
|
|
|
if st.session_state.data:
    st.markdown("### βοΈ Edit Records")

    # Tabular view restricted to the known fields, in metadata order; records
    # missing a field get a NaN cell from reindex.
    df = pd.DataFrame(st.session_state.data)
    df = df.reindex(columns=st.session_state.all_fields)

    # Blank out missing cells BEFORE stringifying: a bare .astype(str) turns
    # NaN into the literal string "nan", which would then be auto-saved back
    # into the dataset below.
    for field in st.session_state.all_fields:
        df[field] = df[field].fillna("").astype(str)

    # Defensive default in case field_types was never initialized.
    if "field_types" not in st.session_state:
        st.session_state.field_types = {
            field: "text" for field in st.session_state.all_fields
        }

    # Wide columns for textarea-typed fields, regular text columns otherwise.
    column_configs = {
        field: (
            st.column_config.TextColumn(label=field, width="large")
            if st.session_state.field_types.get(field) == "textarea"
            else st.column_config.TextColumn(label=field)
        )
        for field in st.session_state.all_fields
    }

    edited_df = st.data_editor(
        df,
        use_container_width=True,
        num_rows="dynamic",
        column_config=column_configs,
    )

    # Auto-save on any change, then rerun so the view reflects saved state.
    if not edited_df.equals(df):
        st.session_state.data = edited_df.fillna("").to_dict(orient="records")
        with open(TMP_FILE, "w", encoding="utf-8") as f:
            for item in st.session_state.data:
                f.write(json.dumps(item, ensure_ascii=False) + "\n")
        # Restored: the original literal was mojibake-mangled and split
        # across two lines (a syntax error); bytes decode to these emoji.
        st.toast("✅ Auto-saved!", icon="💾")
        st.rerun()
|
|
|
|
|
if st.session_state.all_fields:
    st.markdown("### β Add New Entry")

    with st.form("new_entry_form"):
        new_record = {}

        # Clear the input widgets on the run following a successful submit.
        # Widget state can only be assigned before the widgets are
        # instantiated, hence the flag is consumed here at the top.
        if "reset_form" in st.session_state and st.session_state.reset_form:
            for field in st.session_state.all_fields:
                st.session_state[f"input_{field}"] = ""
            st.session_state.reset_form = False

        # One input per field, honoring the configured input type.
        for field in st.session_state.all_fields:
            input_type = st.session_state.field_types.get(field, "text")
            if input_type == "textarea":
                new_record[field] = st.text_area(f"{field}", key=f"input_{field}")
            else:
                new_record[field] = st.text_input(f"{field}", key=f"input_{field}")

        submitted = st.form_submit_button("Add Entry")

        if submitted:
            st.session_state.data.append(new_record)

            # Persist immediately so the new entry survives the rerun.
            with open(TMP_FILE, "w", encoding="utf-8") as f:
                for item in st.session_state.data:
                    f.write(json.dumps(item, ensure_ascii=False) + "\n")

            # Ask the next run to blank the form inputs.
            st.session_state.reset_form = True

            # Restored: the original literal was mojibake-mangled and split
            # across two lines (a syntax error).
            st.success("✅ New entry added!")
            st.rerun()
|
|
|
|
|
with st.expander("β Add New Field"):
    new_field = st.text_input("Field name", key="new_field_name")
    new_type = st.selectbox("Field type", ["text", "textarea"], key="new_field_type")
    if st.button("Add Field"):
        # Ignore empty names and duplicates.
        if new_field and new_field not in st.session_state.all_fields:
            st.session_state.all_fields.append(new_field)
            st.session_state.field_types[new_field] = new_type

            # Persist the updated schema; existing records pick up the new
            # (empty) column through the reindex in the editor section.
            fields_metadata = [
                {"name": f, "type": st.session_state.field_types[f]}
                for f in st.session_state.all_fields
            ]
            with open(META_FILE, "w", encoding="utf-8") as f:
                json.dump({"fields": fields_metadata}, f, indent=2, ensure_ascii=False)
            # Restored: the original f-string was mojibake-mangled and split
            # across two lines (a syntax error).
            st.success(f"✅ Field '{new_field}' added!")
            st.rerun()
|
|
|
|
|
st.markdown("### π€ Download Dataset")

# Serialize the in-memory records as JSONL (one JSON object per line).
jsonl_lines = [json.dumps(row, ensure_ascii=False) for row in st.session_state.data]
dataset_content = "\n".join(jsonl_lines)

# Offer the download only once a working file has been generated.
if not os.path.exists(TMP_FILE):
    st.warning("Dataset not yet generated!")
else:
    st.download_button(
        label="β¬οΈ Download Dataset as JSONL",
        data=dataset_content,
        file_name="session_dataset.jsonl",
        mime="application/json",
    )
|
|