Update functions.py
functions.py  (+2, -3)
@@ -2,8 +2,8 @@ from urllib.request import urlopen, Request
 from urllib.error import URLError
 from bs4 import BeautifulSoup
 import re
+max_length = 627
 def predict(text):
-    max_length = 612
     encoded = tokenizer(text, truncation=True, padding=True, max_length=max_length, return_tensors='tf')
 
     pred = model.predict(dict(encoded), verbose=0)
@@ -49,8 +49,7 @@ def scrape_website(url):
         print("Failed to retrieve the webpage.")
     except URLError as e:
         print("An error occurred while making the request:", e)
-def segmentation(text):
-    max_length = 627  # Maximum length for each chunk
+def segmentation(text):  # Maximum length for each chunk
     total_predictions = 0
     human_written_count = 0
     ai_generated_count = 0
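For context, a minimal sketch (not the author's full file) of how the touched parts of functions.py fit together after this commit: max_length is now a single module-level constant read by both predict() and segmentation(), replacing the two per-function locals (612 and 627). The transformers imports, the placeholder checkpoint name, and the return statement below are assumptions added for illustration; only the lines that appear in the diff are taken from the actual file.

from transformers import AutoTokenizer, TFAutoModelForSequenceClassification

# Placeholder checkpoint: the real tokenizer/model are defined elsewhere in functions.py.
MODEL_NAME = "distilbert-base-uncased"
tokenizer = AutoTokenizer.from_pretrained(MODEL_NAME)
model = TFAutoModelForSequenceClassification.from_pretrained(MODEL_NAME)

max_length = 627  # shared limit: tokenizer truncation in predict(), chunk length in segmentation()

def predict(text):
    # Tokenize with the shared module-level limit (previously a local max_length = 612 here).
    encoded = tokenizer(text, truncation=True, padding=True,
                        max_length=max_length, return_tensors='tf')
    pred = model.predict(dict(encoded), verbose=0)
    return pred  # downstream handling of the prediction is outside this diff

def segmentation(text):  # Maximum length for each chunk
    # Counters initialised as in the diff; the chunking/classification loop is not shown here.
    total_predictions = 0
    human_written_count = 0
    ai_generated_count = 0
    ...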