# CheckGPT / app.py
##### PREPARATIONS
# libraries
import datetime
import json
import os
import sys
import requests
import streamlit as st
MIN_TEXT_LEN = 300
MAX_TEXT_LEN = 32410
API_URL = "http://checkgpt.app:8880/predict"
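
# Minimal sketch of calling the /predict endpoint directly (outside Streamlit).
# The request and response shapes are inferred from the parsing code further
# below; they are an assumption, not an official API contract. This helper is
# illustrative and is not invoked by the app itself.
def query_checkgpt(text: str, url: str = API_URL, timeout: float = 30.0) -> dict:
    """POST a text to the CheckGPT API and return the decoded JSON answer.

    Assumed response fields: "result", "human_score", "ChatGPT_score",
    "request_id", "execution_time".
    """
    response = requests.post(url,
                             data=json.dumps({"text": text}),
                             headers={"Content-Type": "application/json"},
                             timeout=timeout)
    response.raise_for_status()
    return response.json()
# Example usage (not executed here):
#   answer = query_checkgpt("Some text that is long enough to analyse...")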
# download with progress bar
mybar = None
def show_progress(block_num, block_size, total_size):
    global mybar
    if mybar is None:
        mybar = st.progress(0.0)
    downloaded = block_num * block_size / total_size
    if downloaded <= 1.0:
        mybar.progress(downloaded)
    else:
        mybar.progress(1.0)
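
# Usage sketch: show_progress has the (block_num, block_size, total_size)
# signature of a urllib reporthook, so it can be passed to
# urllib.request.urlretrieve to render download progress in the UI.
# The URL and file name below are placeholders, not real project artifacts:
#   import urllib.request
#   urllib.request.urlretrieve("https://example.com/model.bin", "model.bin",
#                              reporthook=show_progress)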
##### CONFIG
# page config
st.set_page_config(page_title="CheckGPT - ChatGPT and other big LM detection engine",
                   page_icon=":books:",
                   layout="centered",
                   initial_sidebar_state="collapsed",
                   menu_items=None)
##### HEADER
# title
st.title('CheckGPT - AI-written text detector')
# description
st.write(
    'CheckGPT is a neural network that checks whether a text was generated by '
    'large AI language models (such as ChatGPT, GPT-3, GPT-2, BLOOM, You.com AI, etc.).')
st.write('Currently supported languages: [\'en\'].')
st.write("Use our web app and REST API at https://checkgpt.app.\n")
st.write('A Telegram bot is already available at: https://t.me/chatgpt_bot.')
st.write('To contact the authors, please write to: https://t.me/uberwow | https://CheckGPT.app')
##### PARAMETERS
# title
st.header('Check for AI-generated text')
# input text
input_text = st.text_area('Which text would you like to check?', '')
##### MODELING
# run the prediction on button click
if st.button('Check'):

    # compute predictions
    with st.spinner('Computing prediction...'):

        # validate the input length
        if len(input_text) < MIN_TEXT_LEN:
            st.error(f'😐 Minimum text length: {MIN_TEXT_LEN} characters. Your text is {len(input_text)} characters.\n'
                     f'The longer the text, the higher the accuracy.\n')
        elif len(input_text) > MAX_TEXT_LEN:
            st.error(f'⚠ The input text is too long! Please try again with a text under {MAX_TEXT_LEN} characters.')
        else:

            # make a json-valid request
            formatted_req = {"text": input_text}
            jsoned_req = json.dumps(formatted_req)

            # try to send the request to the api
            try:
                r = requests.post(API_URL, data=jsoned_req, headers={'Content-Type': 'application/json'})
                answer = json.loads(r.text)
            except (requests.RequestException, json.JSONDecodeError):
                st.error('Backend REST API error. Please try again later!')
                print("Error while sending the POST request to the REST API. Please check your connection and try again!")
                st.stop()

            # extract data from the json answer
            result = int(answer["result"])
            human_score = round(float(answer["human_score"]), 4)
            chatgpt_score = round(float(answer["ChatGPT_score"]), 4)
            req_id = answer["request_id"]
            execution_time = answer["execution_time"]

            # clear memory
            #gc.collect()

            # print output
            st.metric(label="Prediction", value=result)
            st.write(f"✅ Total score (AI signs):\n\n Human-like score: {human_score}%\n ChatGPT-like score: {chatgpt_score}%\n")
            st.write(f"⚙ Information:\n\n Execution time: {execution_time} sec.\n Request ID: {req_id}\n Prediction: {result}")
            if result == 0:
                st.success(f"👨 Seems like the text was written by a human ({human_score}%)!")
            if result == 1:
                st.success(f"🤖 Seems to be ChatGPT-generated text ({chatgpt_score}%)!")

            # save the log
            os.makedirs('logs', exist_ok=True)
            with open('logs/' + 'text_' + str(
                    result) + '_' + '{:%Y-%m-%d_%H-%M-%S}'.format(
                    datetime.datetime.now()) + '.log', 'w', encoding="utf-8") as file:
                file.write(input_text)
##### DOCUMENTATION
# header
st.header('More information')
# models
with st.expander('Read about the models'):
    st.write(
        'For detecting ChatGPT-generated content, we use statistical and heuristic methods: perplexity, '
        'entropy, coherence and consistency of the text, and some of our own know-how ;).')
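
# Illustrative sketch only: one way to estimate the per-token perplexity signal
# mentioned in the expander above, using a small causal LM (GPT-2 via Hugging
# Face transformers). This is NOT the CheckGPT production model and the app
# never calls it; the imports stay inside the function so it costs nothing here.
def gpt2_perplexity(text: str) -> float:
    """Return exp(mean cross-entropy) of the text under GPT-2 (lower = more 'model-like')."""
    import torch
    from transformers import GPT2LMHeadModel, GPT2TokenizerFast
    tokenizer = GPT2TokenizerFast.from_pretrained("gpt2")
    model = GPT2LMHeadModel.from_pretrained("gpt2")
    inputs = tokenizer(text, return_tensors="pt", truncation=True, max_length=1024)
    with torch.no_grad():
        loss = model(**inputs, labels=inputs["input_ids"]).loss
    return float(torch.exp(loss))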
# metric
##### CONTACT
# header
st.header("Contact")
# website link
# copyright
st.text("All rights reserved Β© 2023 | https://t.me/uberwow | https://CheckGPT.app")