Commit 2194060 · Parent(s): 0b22cf5
Weird Test

model/analyzer.py  CHANGED  (+2 -2)
@@ -139,7 +139,7 @@ class ContentAnalyzer:
             logger.error(f"Error loading model: {str(e)}")
             raise
 
-    def _chunk_text(self, text: str, chunk_size: int =
+    def _chunk_text(self, text: str, chunk_size: int = 128, overlap: int = 5) -> List[str]:
         """Split text into overlapping chunks for processing."""
         return [text[i:i + chunk_size] for i in range(0, len(text), chunk_size - overlap)]
 
@@ -172,7 +172,7 @@ class ContentAnalyzer:
         with torch.no_grad():
             outputs = self.model.generate(
                 **inputs,
-                max_new_tokens=
+                max_new_tokens=5,
                 do_sample=True,
                 temperature=0.5,
                 top_p=0.9,
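For reference, the new _chunk_text signature implies fixed-size chunks of 128 characters with a 5-character overlap between consecutive chunks. The following is a minimal standalone sketch of that behavior, assuming the method operates on plain strings; the module-level form (outside ContentAnalyzer) and the sample input are illustrative only, not part of the commit.

from typing import List

def _chunk_text(text: str, chunk_size: int = 128, overlap: int = 5) -> List[str]:
    """Split text into overlapping chunks for processing."""
    # Step by chunk_size - overlap so consecutive chunks share `overlap` characters.
    return [text[i:i + chunk_size] for i in range(0, len(text), chunk_size - overlap)]

# Hypothetical usage, not taken from the commit: chunk a 300-character string.
sample = "x" * 300
chunks = _chunk_text(sample)
print(len(chunks))               # 3 chunks, starting at offsets 0, 123, 246
print([len(c) for c in chunks])  # [128, 128, 54]

The second hunk only adjusts the decoding budget: max_new_tokens=5 caps each self.model.generate call at five newly generated tokens, while do_sample, temperature, and top_p leave sampling unchanged.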