import gradio as gr
from transformers import AutoModelForCausalLM, AutoTokenizer
import torch

# Load the GPT-2 model and tokenizer
model = AutoModelForCausalLM.from_pretrained("gpt2")
tokenizer = AutoTokenizer.from_pretrained("gpt2")
def compare_claims(claim1, claim2):
    """
    Compare two insurance claims using GPT-2.
    """
    prompt = (
        f"Compare these two health insurance claims:\n\n"
        f"Claim 1: {claim1}\n\n"
        f"Claim 2: {claim2}\n\n"
        f"Differences and similarities:"
    )

    # Tokenize the prompt (returns input_ids and attention_mask)
    inputs = tokenizer(prompt, return_tensors="pt")

    # Generate a response
    gen_tokens = model.generate(
        **inputs,
        do_sample=True,
        temperature=0.7,      # Adjust creativity
        max_new_tokens=150,   # Limit the length of the generated text
        pad_token_id=tokenizer.eos_token_id,
    )

    # Decode only the newly generated tokens, skipping the echoed prompt
    new_tokens = gen_tokens[0][inputs["input_ids"].shape[-1]:]
    return tokenizer.decode(new_tokens, skip_special_tokens=True)
def main():
    """
    Launch the Gradio interface for claim comparison.
    """
    # Define the Gradio interface
    interface = gr.Interface(
        fn=compare_claims,
        inputs=[
            gr.Textbox(label="Claim 1", placeholder="Enter first claim description..."),
            gr.Textbox(label="Claim 2", placeholder="Enter second claim description..."),
        ],
        outputs="text",
        title="Claims Comparison",
        description="Enter two health insurance claims to compare their similarities and differences.",
    )

    # Launch the Gradio app
    interface.launch()


if __name__ == "__main__":
    main()
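

# A minimal usage sketch for exercising compare_claims() directly, without the
# Gradio UI. The claim texts below are hypothetical examples, not real data, and
# the base "gpt2" checkpoint will return a free-form sampled continuation:
#
#   print(compare_claims(
#       "Emergency room visit for a fractured wrist, billed at $2,400.",
#       "Urgent care visit for a sprained wrist, billed at $600.",
#   ))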