import os

# Set writable cache directories within /tmp BEFORE importing transformers/matplotlib,
# otherwise the default (possibly read-only) cache paths are resolved at import time.
os.environ["HF_HOME"] = "/tmp/huggingface_cache"  # Replaces the deprecated TRANSFORMERS_CACHE
os.environ["MPLCONFIGDIR"] = "/tmp/matplotlib"

# Create the cache directories if they don't exist
os.makedirs(os.environ["HF_HOME"], exist_ok=True)
os.makedirs(os.environ["MPLCONFIGDIR"], exist_ok=True)

import base64
from collections import Counter
from io import BytesIO

import matplotlib
matplotlib.use('Agg')  # Non-interactive backend; prevents GUI issues for Matplotlib
import matplotlib.pyplot as plt
import pandas as pd
import torch
from flask import Flask, render_template, request
from transformers import BertForSequenceClassification, BertTokenizer

app = Flask(__name__)

# Load fine-tuned weights from a local file on top of the base BERT architecture
MODEL_PATH = "bert_imdb_model.bin"
TOKENIZER_PATH = "bert-base-uncased"

if os.path.exists(MODEL_PATH):
    print("Loading model from local file...")
    model = BertForSequenceClassification.from_pretrained('bert-base-uncased', num_labels=2)
    model.load_state_dict(torch.load(MODEL_PATH, map_location=torch.device('cpu')))
else:
    print(f"Error: Model file {MODEL_PATH} not found.")
    raise SystemExit(1)

model.eval()
tokenizer = BertTokenizer.from_pretrained(TOKENIZER_PATH)


def predict_sentiment(text):
    # BERT accepts at most 512 tokens, so split long reviews into 512-token chunks,
    # classify each chunk, and return the majority sentiment across chunks.
    tokens = tokenizer.encode(text, add_special_tokens=True)
    chunks = [tokens[i:i + 512] for i in range(0, len(tokens), 512)]
    sentiments = []
    for chunk in chunks:
        chunk_text = tokenizer.decode(chunk, skip_special_tokens=True,
                                      clean_up_tokenization_spaces=True)
        inputs = tokenizer(chunk_text, return_tensors="pt", truncation=True,
                           padding=True, max_length=512)
        with torch.no_grad():
            outputs = model(**inputs)
        sentiments.append(outputs.logits.argmax(dim=1).item())
    majority_sentiment = Counter(sentiments).most_common(1)[0][0]
    return 'Positive' if majority_sentiment == 1 else 'Negative'


@app.route('/')
def upload_file():
    return render_template('upload.html')


@app.route('/analyze_text', methods=['POST'])
def analyze_text():
    text = request.form['text']
    sentiment = predict_sentiment(text)
    return render_template('upload.html', sentiment=sentiment)


@app.route('/uploader', methods=['GET', 'POST'])
def upload_file_post():
    if request.method == 'POST':
        f = request.files['file']
        data = pd.read_csv(f)
        data['sentiment'] = data['review'].apply(predict_sentiment)
        sentiment_counts = data['sentiment'].value_counts().to_dict()

        summary = (f"Total Reviews: {len(data)}\n"
                   f"Positive: {sentiment_counts.get('Positive', 0)}\n"
                   f"Negative: {sentiment_counts.get('Negative', 0)}\n")

        # Bar chart of the sentiment distribution, embedded in the page as a base64 PNG
        fig, ax = plt.subplots()
        ax.bar(list(sentiment_counts.keys()), list(sentiment_counts.values()),
               color=['red', 'blue'])
        ax.set_ylabel('Counts')
        ax.set_title('Sentiment Analysis Summary')

        img = BytesIO()
        fig.savefig(img, format='png', bbox_inches='tight')
        img.seek(0)
        plot_url = base64.b64encode(img.getvalue()).decode('utf8')
        plt.close(fig)

        return render_template('result.html',
                               tables=[data.to_html(classes='data')],
                               titles=data.columns.values,
                               summary=summary,
                               plot_url=plot_url)

    # Fall back to the upload form on GET instead of returning None
    return render_template('upload.html')


if __name__ == '__main__':
    app.run(host='0.0.0.0', port=7860, debug=True)