Skip to content

Commit 3c41685

Browse files
fix(webui): fix lint problems & fix typo
1 parent dd0377f commit 3c41685

2 files changed

Lines changed: 5 additions & 4 deletions

File tree

webui/app.py

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -203,7 +203,7 @@ def sum_tokens(client):
203203

204204
progress(1.0, "Graph traversed")
205205
return output_file, gr.DataFrame(label='Token Stats',
206-
headers=["Source Text Token Count", "Predicted Token Count", "Token Used"],
206+
headers=["Source Text Token Count", "Expected Token Usage", "Token Used"],
207207
datatype=["str", "str", "str"],
208208
interactive=False,
209209
value=data_frame,
@@ -389,7 +389,7 @@ def sum_tokens(client):
389389

390390
with gr.Blocks():
391391
token_counter = gr.DataFrame(label='Token Stats',
392-
headers=["Source Text Token Count", "Predicted Token Count", "Token Used"],
392+
headers=["Source Text Token Count", "Estimated Token Usage", "Token Used"],
393393
datatype=["str", "str", "str"],
394394
interactive=False,
395395
visible=False,

webui/count_tokens.py

Lines changed: 3 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -2,9 +2,10 @@
22
import sys
33
import json
44
import pandas as pd
5+
6+
# pylint: disable=wrong-import-position
57
root_dir = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
68
sys.path.append(root_dir)
7-
89
from graphgen.models import Tokenizer
910

1011
def count_tokens(file, tokenizer_name, data_frame):
@@ -50,7 +51,7 @@ def count_tokens(file, tokenizer_name, data_frame):
5051
)
5152
data_frame = new_df
5253

53-
except Exception as e:
54+
except Exception as e: # pylint: disable=broad-except
5455
print("[ERROR] DataFrame操作异常:", str(e))
5556

5657
return data_frame

0 commit comments

Comments (0)