-
Notifications
You must be signed in to change notification settings - Fork 0
/
Copy pathoffline.py
58 lines (46 loc) · 1.79 KB
/
offline.py
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
import os
import requests
from dotenv import load_dotenv
from transformers import AutoTokenizer, AutoModelForCausalLM
import streamlit as st
from bs4 import BeautifulSoup
# Load environment variables from .env file
load_dotenv()
# Load any necessary API credentials from environment variables
# NOTE(review): API_KEY is read but never used anywhere in this file —
# confirm whether it is needed or can be removed.
API_KEY = os.getenv("API_KEY")
# ... load other credentials if needed
# Load the model and tokenizer.
# NOTE: this runs at import time and downloads/loads a ~1.3B-parameter model,
# which is slow and memory-heavy; every Streamlit rerun re-executes the script,
# so in practice this should be wrapped in a cached loader (e.g. st.cache_resource).
tokenizer = AutoTokenizer.from_pretrained("facebook/opt-iml-1.3b")
model = AutoModelForCausalLM.from_pretrained("facebook/opt-iml-1.3b")
# Function to generate a response
def generate_response(prompt, context="", max_tokens=256):
    """Generate a model completion for *prompt*, optionally prefixed with *context*.

    Args:
        prompt: The user's prompt text.
        context: Optional text (e.g. scraped page content) prepended to the prompt.
        max_tokens: Maximum number of NEW tokens to generate.

    Returns:
        The decoded generated text (includes the prompt, since the full
        output sequence is decoded).
    """
    input_text = f"{context} {prompt}"
    input_ids = tokenizer.encode(input_text, return_tensors="pt")
    # BUG FIX: the original used max_length=max_tokens, which caps the TOTAL
    # sequence length (prompt + generation). Any prompt/context longer than
    # max_tokens left no room for output at all. max_new_tokens bounds only
    # the generated continuation, which is what the parameter name implies.
    output = model.generate(
        input_ids,
        max_new_tokens=max_tokens,
        do_sample=True,
        top_k=50,
        top_p=0.95,
        num_return_sequences=1,
    )
    response = tokenizer.decode(output[0], skip_special_tokens=True)
    return response
# Function to scrape content from a URL
def scrape_url(url, timeout=10):
    """Fetch *url* and return its visible text content.

    Args:
        url: The URL to fetch.
        timeout: Seconds to wait for the server before giving up
            (the original had no timeout and could hang indefinitely).

    Returns:
        The page's text with markup stripped, via BeautifulSoup.get_text().

    Raises:
        requests.RequestException: On network failure, timeout, or an
            HTTP error status (4xx/5xx) — previously error pages were
            silently scraped as if they were real content.
    """
    response = requests.get(url, timeout=timeout)
    response.raise_for_status()  # surface 404/500 instead of scraping the error page
    soup = BeautifulSoup(response.text, "html.parser")
    content = soup.get_text()
    return content
# Streamlit app
def main():
    """Render the Streamlit UI: take a prompt (and optional URL) and show a response."""
    st.title("Local ChatGPT with URL Scraping")
    # Get user input
    prompt = st.text_area("Enter your prompt:", height=200)
    url = st.text_input("Enter a URL (optional):")
    if st.button("Generate Response"):
        # Robustness: don't feed an empty prompt to the model.
        if not prompt.strip():
            st.warning("Please enter a prompt.")
            return
        scraped_content = ""
        if url:
            # BUG FIX: the original try-block wrapped BOTH the scrape and the
            # generation, so a generation failure was misreported as
            # "Error scraping URL" and generation was attempted a second time.
            # Only the scrape belongs in the try; on failure we fall back to
            # prompt-only generation, as before.
            try:
                scraped_content = scrape_url(url)
            except Exception as e:
                st.error(f"Error scraping URL: {e}")
                scraped_content = ""
        response = generate_response(prompt, context=scraped_content)
        st.success(response)
if __name__ == "__main__":
    main()