Bils committed (verified)
Commit 00f9f38 · 1 Parent(s): 397d7d9

Update app.py

Files changed (1)
  1. app.py +2 -5
app.py CHANGED
@@ -4,7 +4,7 @@ from transformers import AutoConfig, AutoModelForCausalLM
 from janus.models import MultiModalityCausalLM, VLChatProcessor
 from PIL import Image
 import numpy as np
-import spaces
+import spaces  # Ensure this is available
 
 # Load the model and processor
 model_path = "deepseek-ai/Janus-Pro-7B"
@@ -17,10 +17,7 @@ vl_gpt = AutoModelForCausalLM.from_pretrained(
     language_config=language_config,
     trust_remote_code=True
 )
-if torch.cuda.is_available():
-    vl_gpt = vl_gpt.to(torch.bfloat16).cuda()
-else:
-    vl_gpt = vl_gpt.to(torch.float16)
+vl_gpt = vl_gpt.to(torch.bfloat16).cuda() if torch.cuda.is_available() else vl_gpt.to(torch.float16)
 
 vl_chat_processor = VLChatProcessor.from_pretrained(model_path)
 tokenizer = vl_chat_processor.tokenizer
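
For context: the retained "import spaces" line (now annotated "Ensure this is available") refers to the package Hugging Face Spaces provides for ZeroGPU hardware; importing it is what makes the spaces.GPU decorator usable elsewhere in app.py. Below is a minimal sketch of that typical usage, assuming the app defines an inference function (the name describe_image and its body are illustrative placeholders, not part of this commit):

import spaces  # package provided on Hugging Face Spaces (ZeroGPU)
import torch

@spaces.GPU  # requests a GPU for the duration of this call on a ZeroGPU Space
def describe_image(prompt: str) -> str:
    # vl_gpt and tokenizer are the module-level objects loaded in app.py above;
    # this body is an assumed placeholder, not code from this commit.
    inputs = tokenizer(prompt, return_tensors="pt").to(vl_gpt.device)
    with torch.inference_mode():
        output_ids = vl_gpt.language_model.generate(**inputs, max_new_tokens=64)
    return tokenizer.decode(output_ids[0], skip_special_tokens=True)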