Add application file
app.py
CHANGED
@@ -9,7 +9,6 @@ assert (
 ), "LLaMA is now in HuggingFace's main branch.\nPlease reinstall it: pip uninstall transformers && pip install git+https://github.com/huggingface/transformers.git"
 from transformers import LlamaTokenizer, LlamaForCausalLM, GenerationConfig
 access_token = os.environ.get('HF_TOKEN')
-print(access_token)
 
 tokenizer = LlamaTokenizer.from_pretrained("meta-llama/Llama-2-7b-hf", token=access_token)
 
@@ -27,6 +26,8 @@ try:
 except:
     pass
 
+print("Device: " + str(device))
+
 if device == "cuda":
     model = LlamaForCausalLM.from_pretrained(
         BASE_MODEL,
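For context: the hunks above never show where `device` is assigned, and the first hunk drops `print(access_token)`, presumably to keep the HF_TOKEN secret out of the Space logs. The sketch below is an assumption about the setup that would typically precede these hunks, not code from this commit; only the names `device`, `BASE_MODEL`, `access_token`, the HF_TOKEN lookup, and the added device print are taken from the diff, while the torch-based device selection and the BASE_MODEL value are conventional guesses.

# Minimal sketch (assumption): conventional setup preceding the hunks above.
import os
import torch

BASE_MODEL = "meta-llama/Llama-2-7b-hf"    # hypothetical value; the diff only shows the name BASE_MODEL
access_token = os.environ.get("HF_TOKEN")  # read the token from the environment, but never print it

# Choose the device the way most Transformers demo apps do, then log it,
# which matches the print("Device: ...") line added in this commit.
device = "cuda" if torch.cuda.is_available() else "cpu"
print("Device: " + str(device))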