import os
import sys

# --- THE MAGIC SHIELD: MUST BE AT THE TOP ---
# Force TensorFlow to use the legacy (tf_keras) Keras implementation.
# This env var is read at `import tensorflow` time, so it MUST be set
# before the tensorflow import below.
os.environ["TF_USE_LEGACY_KERAS"] = "1"
import tensorflow as tf
import tf_keras as keras
# Alias the top-level `keras` module to tf_keras so that libraries which
# do `import keras` (e.g. transformers' TF models) get the legacy Keras
# instead of Keras 3.
sys.modules["keras"] = keras
# --------------------------------------------
10+
from flask import Flask, render_template, request
import transformers
from transformers import TFAutoModelForCausalLM, AutoTokenizer
import logging
from scripts.system.generate_text import generate_text
import webbrowser

# Suppress the startup noise from transformers and TensorFlow.
# NOTE: `tf` is imported at the top of the file (before these imports),
# as required by the TF_USE_LEGACY_KERAS shield.
transformers.logging.set_verbosity_error()
tf.get_logger().setLevel(logging.ERROR)
1121
app = Flask(__name__, static_url_path='/static')

model_name = "gpt2"

# With TF_USE_LEGACY_KERAS set at the top of the file, this loads without
# the 'NoneType' error seen under Keras 3.
model = TFAutoModelForCausalLM.from_pretrained(model_name)
# 50256 is GPT-2's end-of-text token id; it doubles as the pad token here
# because GPT-2 ships without a dedicated pad token.
tokenizer = AutoTokenizer.from_pretrained(model_name, pad_token_id=50256)
1729
# ... interior of `def generate():` — the route's earlier lines are not
# shown in this excerpt (diff hunk context) ...
    return render_template('index.html', prompt=prompt, generated_text=generated_text)
2739
if __name__ == "__main__":
    # Note: debug=True can sometimes cause double-loading of the model,
    # which might crash your RAM (the 10% warning you keep seeing).
    # If it crashes, try setting debug=False.
    # use_reloader=False already prevents the reloader process from
    # importing the module (and loading the model) a second time.
    # NOTE(review): the browser is opened before app.run() starts serving;
    # on a slow model load the first page load may race the server — verify.
    webbrowser.open('http://127.0.0.1:5000/')
    app.run(debug=True, use_reloader=False)