Commit 27d2cd5
1 Parent(s): 3f3fa7a

Changed LLMs due to hitting rate limit

Files changed:
- app.py +1 -1
- src/crew.py +5 -3
app.py CHANGED

@@ -91,7 +91,7 @@ with gr.Blocks(
 
     # Footer with example hint
     gr.Markdown(
-        "
+        "Download and use any frontal chest X-ray PNG/JPG file from the internet and click **Generate Report**.\n"
         "### NOTE: This is just a demo. It is not intended to diagnose or suggest treatment of any disease or condition, and should not be used for medical advice."
     )  # Display disclaimer and usage hint
 
src/crew.py CHANGED

@@ -13,8 +13,10 @@ class RadiologyCrew:
         # Initialize LLMs with optimal settings
         self.vision_llm = LLM(model="groq/meta-llama/llama-4-scout-17b-16e-instruct", temperature=0.2)
         self.text_llm = LLM(model="groq/llama-3.3-70b-versatile", temperature=0.1)
+        self.text_llm2 = LLM(model="groq/llama3-70b-8192", temperature=0.1)
+        self.text_llm3 = LLM(model="groq/meta-llama/llama-4-maverick-17b-128e-instruct", temperature=0.1)
         self.draft_llm = LLM(model="groq/deepseek-r1-distill-llama-70b", temperature=0.3)
-        self.critic_llm = LLM(model="groq/meta-llama/llama-4-scout-17b-16e-instruct", temperature=0.
+        self.critic_llm = LLM(model="groq/meta-llama/llama-4-scout-17b-16e-instruct", temperature=0.3)
 
     @agent
     def vision_agent(self) -> Agent:

@@ -31,7 +33,7 @@ class RadiologyCrew:
         return Agent(
             config=self.agents_config['pubmed_agent'],
             tools=[self.tools["pubmed_tool"]],
-            llm=self.
+            llm=self.text_llm2,
             verbose=False
         )
 
@@ -40,7 +42,7 @@ class RadiologyCrew:
         return Agent(
             config=self.agents_config['iu_agent'],
             tools=[self.tools["iu_tool"]],
-            llm=self.
+            llm=self.text_llm3,
             verbose=False
         )
 
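The net effect of the change, as far as the diff shows, is that the crew's agents are spread across more distinct Groq models, so each agent's requests count against a different model's quota rather than piling onto one endpoint. The following is a minimal sketch, not part of the commit, of the full model line-up after this change, using only the crewai LLM constructor already present in src/crew.py; the diff confirms the assignments only for pubmed_agent and iu_agent, and the other agent-to-model mappings are not visible here.

# Sketch of the Groq models initialized in RadiologyCrew after this commit.
# Only the pubmed_agent and iu_agent bindings are confirmed by the diff.
from crewai import LLM

vision_llm = LLM(model="groq/meta-llama/llama-4-scout-17b-16e-instruct", temperature=0.2)
text_llm   = LLM(model="groq/llama-3.3-70b-versatile", temperature=0.1)
text_llm2  = LLM(model="groq/llama3-70b-8192", temperature=0.1)                                # used by pubmed_agent
text_llm3  = LLM(model="groq/meta-llama/llama-4-maverick-17b-128e-instruct", temperature=0.1)  # used by iu_agent
draft_llm  = LLM(model="groq/deepseek-r1-distill-llama-70b", temperature=0.3)
critic_llm = LLM(model="groq/meta-llama/llama-4-scout-17b-16e-instruct", temperature=0.3)

Because each agent now points at its own model ID, their request streams draw on separate per-model quotas, which is consistent with the commit message about hitting a rate limit.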