|
| 1 | +import numpy as np |
| 2 | +import pandas as pd |
| 3 | +from transformers import AutoTokenizer, AutoModelForSequenceClassification |
| 4 | +from scipy.special import softmax |
| 5 | + |
| 6 | +import taipy.gui.builder as tgb |
| 7 | +from taipy.gui import notify, Gui |
| 8 | + |
# ----------------------------------
# Model & Data Initialization
# ----------------------------------
# Hugging Face model id of a 3-class sequence classifier.
# NOTE(review): analyze_text maps index 0 -> negative, 1 -> neutral,
# 2 -> positive — presumably the model's label order; confirm on the hub.
MODEL = "sbcBI/sentiment_analysis_model"
tokenizer = AutoTokenizer.from_pretrained(MODEL)  # downloaded/cached on first run
model = AutoModelForSequenceClassification.from_pretrained(MODEL)

# Module-level variables below double as Taipy GUI state bindings.
text = "Original text"  # current input on the "line" page
# Results table, seeded with one neutral placeholder row so the table
# and chart have something to render before any analysis runs.
dataframe = pd.DataFrame(
    {
        "Text": [""],
        "Score Pos": [0.33],
        "Score Neu": [0.33],
        "Score Neg": [0.33],
        "Overall": [0],
    }
)
dataframe2 = dataframe.copy()  # results table for the file-upload ("text") page

path = ""      # selected file path (bound to the file_selector)
treatment = 0  # progress percentage while analyze_file walks the file
| 31 | +# ---------------------------------- |
| 32 | +# Helper Functions |
| 33 | +# ---------------------------------- |
def analyze_text(input_text: str) -> dict:
    """Score ``input_text`` with the sentiment model.

    Returns a dict whose keys match the results-table columns: the text
    (truncated to 50 characters), the three class probabilities, and an
    overall score computed as positive minus negative probability.
    """
    tokens = tokenizer(input_text, return_tensors="pt")
    logits = model(**tokens)[0][0].detach().numpy()
    probs = softmax(logits)

    # Index order: 0 = negative, 1 = neutral, 2 = positive.
    negative, neutral, positive = probs[0], probs[1], probs[2]
    return {
        "Text": input_text[:50],
        "Score Pos": positive,
        "Score Neu": neutral,
        "Score Neg": negative,
        "Overall": positive - negative,
    }
| 51 | + |
| 52 | + |
def local_callback(state):
    """Score the text currently typed on the "line" page.

    Pops a notification, appends one scored row to ``state.dataframe``
    (working on a copy and reassigning so Taipy detects the change),
    then clears the input field.
    """
    notify(state, "Info", f"The text is: {state.text}", True)
    updated = state.dataframe.copy()
    updated.loc[len(updated)] = analyze_text(state.text)
    state.dataframe = updated
    state.text = ""
| 63 | + |
| 64 | + |
def analyze_file(state):
    """
    Analyze each non-empty line of the uploaded text file and update `dataframe2`.

    Resets the results table and the progress indicator, scores every line,
    appends the resulting rows to ``state.dataframe2`` (reassigning on each
    step so the GUI refreshes live), and finally clears the selected path.
    """
    # Reset to the seed table and restart the progress indicator.
    state.dataframe2 = dataframe2
    state.treatment = 0

    with open(state.path, "r", encoding="utf-8") as f:
        file_list = f.read().split("\n")

    for i, input_text in enumerate(file_list):
        # Count every line (even skipped ones) so progress reaches 100%.
        state.treatment = int((i + 1) * 100 / len(file_list))
        # Skip blank lines — split("\n") yields an empty string for the
        # trailing newline and any empty line; feeding "" to the model
        # would add a meaningless row to the table.
        if not input_text.strip():
            continue
        temp = state.dataframe2.copy()
        temp.loc[len(temp)] = analyze_text(input_text)
        state.dataframe2 = temp  # reassign so Taipy pushes the update

    state.path = None
| 84 | + |
| 85 | + |
| 86 | +# ---------------------------------- |
| 87 | +# Building Pages with TGB |
| 88 | +# ---------------------------------- |
| 89 | + |
| 90 | +# --------------------- |
| 91 | +# Home Page ("/") |
| 92 | +# --------------------- |
# Root page ("/"): shell rendered around every routed page —
# a dark/light theme toggle plus the navigation bar.
with tgb.Page() as root_page:
    tgb.toggle(theme=True)
    tgb.navbar()
| 96 | + |
| 97 | +# --------------------- |
| 98 | +# "line" Page |
| 99 | +# --------------------- |
# "line" page: single-text analysis — input, results table, mean summaries
# and a combined bar/line chart.
with tgb.Page() as page:
    tgb.text("# Getting started with **Taipy** GUI", mode="md")

    # Text input bound to `text`, analyzed on button press.
    tgb.text("**My text:** {text}", mode="md")
    tgb.input("{text}", label="Enter a word:")
    tgb.button("Analyze", on_action=local_callback)

    # Display the main dataframe as a table.
    tgb.text("### Analyzed Entries", mode="md")
    tgb.table("{dataframe}", number_format="%.2f")

    # Mean-score summaries in a 1-1-1 layout. All three labels now share
    # the same "## <Label> <value>" format (Neutral/Negative previously
    # had a stray leading space and no space before the value).
    with tgb.layout(columns="1 1 1"):
        tgb.text(lambda dataframe: f"## Positive {np.mean(dataframe['Score Pos']):.2f}", class_name="h4")

        tgb.text(lambda dataframe: f"## Neutral {np.mean(dataframe['Score Neu']):.2f}", class_name="h4")

        tgb.text(lambda dataframe: f"## Negative {np.mean(dataframe['Score Neg']):.2f}", class_name="h4")

    # Bar chart of the three scores per entry, with "Overall" as a line.
    tgb.chart(
        "{dataframe}",
        x="Text",
        y=["Score Pos", "Score Neu", "Score Neg", "Overall"],
        color=["green", "grey", "red", "yellow"],
        type=["bar", "bar", "bar", "line"],  # per-trace type list
        title="Sentiment Trends",
    )
| 129 | + |
| 130 | +# --------------------- |
| 131 | +# "text" Page |
| 132 | +# --------------------- |
# "text" page: upload a .txt file, analyze it line by line, show results.
with tgb.Page() as page_file:
    tgb.text("## File Uploader", mode="md")
    # File selector: picking a .txt file triggers analyze_file.
    tgb.file_selector(
        "{path}",
        label="Upload .txt file",
        extensions=".txt",
        on_action=analyze_file
    )
    # `treatment` is the progress percentage updated by analyze_file.
    tgb.text(lambda treatment: f"Downloading {treatment}%...", mode="md")

    tgb.text("### Analyzed Entries from File", mode="md")
    tgb.table("{dataframe2}", number_format="%.2f")

    # Bar + line chart for file-based results.
    # NOTE(review): `subtypes` is presumably meant to draw "Overall" as a
    # line over the bars — confirm against the Taipy chart API; the "line"
    # page achieves this with a per-trace `type` list instead.
    tgb.chart(
        "{dataframe2}",
        type="bar",
        x="Text",
        y=["Score Pos", "Score Neu", "Score Neg", "Overall"],
        color=["green", "grey", "red", None],
        subtypes={"Overall": "line"},
        title="Sentiment from File",
        height="600px",
    )
| 158 | + |
| 159 | +# --------------------- |
| 160 | +# Run the App |
| 161 | +# --------------------- |
# Route table: "/" carries the shared shell (navbar + theme toggle),
# the other entries are the two content pages.
pages = {
    "/": root_page,
    "line": page,
    "text": page_file,
}
gui = Gui(pages=pages)
gui.run(dark_mode=True, title="Sentiment Analysis")
| 169 | + |
0 commit comments