import gradio as gr
import random
from transformers import AutoTokenizer, AutoModelForSeq2SeqLM

# Load a lightweight model for faster responses
tokenizer = AutoTokenizer.from_pretrained("facebook/blenderbot-400M-distill")
model = AutoModelForSeq2SeqLM.from_pretrained("facebook/blenderbot-400M-distill")

# AI identity (user-renamable at runtime) and remembered user facts
AI_NAME = "Lan π"
user_memory = {}


def chat(user_input, history=None):
    """Return a text reply for *user_input*.

    A few hard-coded intents are handled first (renaming the bot,
    storing/recalling the user's name, age and city); anything else is
    answered by the seq2seq model.

    Args:
        user_input: the raw message typed by the user.
        history: prior conversation turns; currently unused by the
            generation step but kept for interface compatibility.

    Returns:
        The bot's reply as a string.
    """
    global AI_NAME
    # BUG FIX: the original used a mutable default argument (history=[]),
    # which is shared across calls. Use None as the sentinel instead.
    history = [] if history is None else history
    user_input_lower = user_input.lower()

    # --- Name recognition -------------------------------------------------
    if "call you" in user_input_lower or "your name is" in user_input_lower:
        # BUG FIX: the original did
        #   split("call you")[-1].strip(...) or split("your name is")[-1]...
        # Splitting on an absent phrase returns the whole (truthy) input, so
        # the "your name is" branch was unreachable. Pick the phrase that
        # actually matched before splitting.
        phrase = "call you" if "call you" in user_input_lower else "your name is"
        new_name = user_input.split(phrase)[-1].strip(" ?.!")
        if new_name:  # ignore empty renames instead of producing " π"
            AI_NAME = new_name.capitalize() + " π"
        return f"Yay! You can now call me {AI_NAME}! π"

    if "what is your name" in user_input_lower or "tell your name" in user_input_lower:
        return f"My name is {AI_NAME}! π"

    # --- Store user details -----------------------------------------------
    if "my name is" in user_input_lower:
        user_name = user_input.split("my name is")[-1].strip(" ?.!")
        user_memory['name'] = user_name
        return f"Nice to meet you, {user_name}! π"

    if "i am" in user_input_lower and "years old" in user_input_lower:
        age = ''.join(filter(str.isdigit, user_input))
        user_memory['age'] = age
        return f"Wow, {age} is a great age! π₯³"

    if "i live in" in user_input_lower:
        city = user_input.split("i live in")[-1].strip(" ?.!")
        user_memory['city'] = city
        return f"{city} sounds like a beautiful place! π"

    # --- Recall user details ----------------------------------------------
    if "what is my name" in user_input_lower:
        return f"Your name is {user_memory.get('name', 'Hmm, I donβt think you told me yet! π€')}"

    if "where do i live" in user_input_lower:
        return f"You live in {user_memory.get('city', 'a mystery place! π€')}"

    if "how old am i" in user_input_lower:
        return f"You are {user_memory.get('age', 'hmm... I donβt think you told me yet! π€')} years old."

    # --- AI chat processing (model fallback) -------------------------------
    inputs = tokenizer(user_input, return_tensors="pt")
    reply_ids = model.generate(**inputs, max_length=100)
    response = tokenizer.decode(reply_ids[0], skip_special_tokens=True)
    return response


# --- Gradio UI ---------------------------------------------------------------
def respond(message, history):
    """Append the (message, reply) pair to *history* and clear the input box."""
    response = chat(message)
    history.append((message, response))
    return history, ""


with gr.Blocks(theme=gr.themes.Soft()) as iface:
    # NOTE(review): the original source was truncated in the middle of this
    # UI block (inside a gr.Markdown call). The layout below is a minimal
    # reconstruction consistent with respond()'s (history, "") return shape —
    # confirm against the complete original file.
    gr.Markdown(f"# Chat with {AI_NAME}")
    chatbot = gr.Chatbot()
    msg = gr.Textbox(placeholder="Type a message and press Enter")
    msg.submit(respond, [msg, chatbot], [chatbot, msg])