author | Ben Sima <ben@bsima.me> | 2023-08-16 17:19:38 -0400 |
---|---|---|
committer | Ben Sima <ben@bsima.me> | 2023-08-16 18:25:00 -0400 |
commit | 312eed7089e33aede0454d72e677092e297f7a72 (patch) | |
tree | b061c1e3f53db684f1fdc352f5379857231c7b2a /ava.py | |
parent | 7d7e0c02351303489d5555627337a39b519b536a (diff) | |
Delete unused file
Diffstat (limited to 'ava.py')
-rwxr-xr-x | ava.py | 81 |
1 file changed, 0 insertions, 81 deletions
@@ -1,81 +0,0 @@
-#!/usr/bin/env python
-# : out ava
-# : dep transformers
-# : dep torch
-# : dep accelerate
-# : dep bitsandbytes
-import transformers
-import torch
-import sys
-
-# import sleekxmpp
-
-
-model_name = "lmsys/vicuna-33b-v1.3"
-
-if torch.cuda.is_available():
-    device = "cuda:0"
-else:
-    raise ValueError("no cuda")
-    sys.exit(1)
-
-tokenizer = transformers.AutoTokenizer.from_pretrained(model_name)
-model = transformers.AutoModelForCausalLM.from_pretrained(
-    model_name,
-    device_map="auto",
-    load_in_8bit=True,
-    pad_token_id=tokenizer.eos_token_id,
-    revision="float16",
-    torch_dtype=torch.float16,
-    low_cpu_mem_usage=True,
-)
-
-# set attention_mask and pad_token_id
-
-
-def gen(txt):
-    input_ids = tokenizer(txt, return_tensors="pt").input_ids.to("cuda")
-    outputs = model.generate(
-        input_ids=input_ids,
-        max_length=1024,
-        temperature=0.7,
-    )
-    result = tokenizer.batch_decode(outputs, skip_special_tokens=True)
-    result = "".join(result)
-    return result
-
-
-# Get user input and generate a response
-while True:
-    user_input = input("ben: ")
-    response = gen(user_input)
-    print("bot: ", response)
-
-
-"""
-# Set up the XMPP client
-client = sleekxmpp.ClientXMPP(
-    "ava@simatime.com",
-    "test"
-)
-client.connect()
-client.process(block=True)
-
-# Define a function that takes in a user's input and returns a response
-def generate_response(input_text):
-    # You would use your language model to generate a response here
-    response = "This is a response to the user's input: " + input_text
-    return response
-
-# Handle incoming messages
-@client.add_event_handler("message")
-def handle_message(message):
-    # Get the user's input
-    user_input = message["body"]
-
-    # Generate a response
-    response = generate_response(user_input)
-
-    # Send the response to the user
-    message.reply(response).send()
-"""
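Side note on the deleted code: its `gen()` left the in-file TODO ("set attention_mask and pad_token_id") unresolved and passed only `input_ids` to `model.generate()`. The sketch below, which is not part of the commit, shows one way that loose end could have been tied up with the Hugging Face `transformers` API. The checkpoint name, 8-bit load, and sampling settings are carried over from the deleted file; the attention-mask/pad-token wiring and the added `do_sample=True` flag are assumptions about what the TODO intended.

```python
# Minimal sketch (not part of the commit): resolving the deleted script's
# "set attention_mask and pad_token_id" TODO. Assumes the same checkpoint
# and 8-bit load as the deleted ava.py; requires transformers, torch,
# accelerate, and bitsandbytes, matching that file's declared deps.
import torch
import transformers

model_name = "lmsys/vicuna-33b-v1.3"  # same checkpoint as the deleted script

tokenizer = transformers.AutoTokenizer.from_pretrained(model_name)
model = transformers.AutoModelForCausalLM.from_pretrained(
    model_name,
    device_map="auto",
    load_in_8bit=True,
    torch_dtype=torch.float16,
)


def gen(txt: str) -> str:
    # Keep the full tokenizer output (input_ids AND attention_mask)
    # instead of only input_ids, as the deleted gen() did.
    inputs = tokenizer(txt, return_tensors="pt").to(model.device)
    outputs = model.generate(
        input_ids=inputs.input_ids,
        attention_mask=inputs.attention_mask,
        pad_token_id=tokenizer.eos_token_id,
        max_length=1024,
        do_sample=True,   # assumption: sampling was intended, given temperature=0.7
        temperature=0.7,
    )
    return tokenizer.batch_decode(outputs, skip_special_tokens=True)[0]
```

With this wiring, a call like `gen("hello")` behaves like one turn of the deleted REPL loop, without the warning `transformers` typically emits when `pad_token_id` is left unset during generation.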