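#!/usr/bin/env bash
# run-test.sh: drives run-llm.py over several reverse-instruction models.
# Only one invocation is active at a time; earlier runs are kept below as commented-out references.

# Reference run: Philipp-Sc/mistral-7b-reverse-instruct with the Llama-2 chat tokenizer,
# a local config.json, and a model-specific prompt template.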
# python run-llm.py \
# --prompts_file "mistral-7b-reverse-instruct-prompts.txt" \
# --output_file "mistral-7b-reverse-instruct-responses.txt" \
# --model_name "Philipp-Sc/mistral-7b-reverse-instruct" \
# --tokenizer_name "meta-llama/Llama-2-7b-chat-hf" \
# --config_path "./config.json" \
# --prompt_template_file "mistral-7b-reverse-instruct-prompt-template.txt"
# python run-llm.py \
# --prompts_file "prompts.txt" \
# --output_file "vikp_reverse_instruct-responses.txt" \
# --model_name "vikp/reverse_instruct" \
# --prompt_template_file "vikp_reverse_instruct-prompt-template.txt"
python run-llm.py \
--prompts_file "prompts.txt" \
--output_file "reverse-pythia-160m-responses.txt" \
--model_name "afterless/reverse-pythia-160m" \
--flip_tokens
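# Reference run: feed the reverse-pythia-160m responses back through meta-llama/Llama-2-7b-chat-hf.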
# python run-llm.py \
# --prompts_file "reverse-pythia-160m-responses.txt" \
# --output_file "llama-2-7b-chat-hf-responses.txt" \
# --model_name "meta-llama/Llama-2-7b-chat-hf" \