Update modeling_videochat_flash.py
modeling_videochat_flash.py CHANGED
@@ -683,8 +683,8 @@ class VideoChatFlashQwenForCausalLM(LlavaMetaForCausalLM, Qwen2ForCausalLM_Flash
 
         outputs = outputs.strip()
 
-        print(f"\033[91m== Question: \033[0m\n{prompt}\n")
-        print(f"\033[91m== Response: \033[0m\n{outputs}\n")
+        # print(f"\033[91m== Question: \033[0m\n{prompt}\n")
+        # print(f"\033[91m== Response: \033[0m\n{outputs}\n")
 
         if chat_history is None:
             chat_history = []
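The commit silences the debug output by commenting it out. A minimal sketch of an alternative, assuming a verbosity flag could be threaded through the chat method (the `verbose` parameter and `_debug_print` helper are hypothetical, not part of the original VideoChatFlash code): gating the prints behind a flag keeps the debug output available without re-editing the source.

# Hypothetical sketch: gate the debug prints behind a flag instead of
# commenting them out. `verbose` is an assumed parameter, not part of
# the original VideoChatFlashQwenForCausalLM API.
def _debug_print(prompt: str, outputs: str, verbose: bool = False) -> None:
    """Print the question/response pair in red ANSI text when verbose is set."""
    if verbose:
        print(f"\033[91m== Question: \033[0m\n{prompt}\n")
        print(f"\033[91m== Response: \033[0m\n{outputs}\n")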