don't worry about duplicate code here
tests/test_packed_dataset.py
CHANGED
@@ -17,6 +17,7 @@ class TestPacking(unittest.TestCase):
     """
 
     def setUp(self) -> None:
+        # pylint: disable=duplicate-code
         self.tokenizer = AutoTokenizer.from_pretrained("huggyllama/llama-7b")
         self.tokenizer.add_special_tokens(
             {
tests/test_prompt_tokenizers.py
CHANGED
@@ -18,6 +18,7 @@ class TestPromptTokenizationStrategies(unittest.TestCase):
     """
 
     def setUp(self) -> None:
+        # pylint: disable=duplicate-code
        self.tokenizer = AutoTokenizer.from_pretrained("huggyllama/llama-7b")
        self.tokenizer.add_special_tokens(
            {
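For context, here is a minimal sketch of how the patched `setUp` reads in either test class, reconstructed from the hunks above. The two test modules share a near-identical tokenizer fixture, which is what trips pylint's duplicate-code check (R0801); the inline `# pylint: disable=duplicate-code` comment is the commit's way of waiving that warning for this intentional duplication. The special-tokens mapping is truncated in the diff, so it is left empty in this sketch.

```python
import unittest

from transformers import AutoTokenizer  # assumed import; not shown in the hunks


class TestPacking(unittest.TestCase):  # same pattern in TestPromptTokenizationStrategies
    """(docstring elided)"""

    def setUp(self) -> None:
        # pylint: disable=duplicate-code
        # Shared fixture: load the llama tokenizer and register special tokens.
        self.tokenizer = AutoTokenizer.from_pretrained("huggyllama/llama-7b")
        self.tokenizer.add_special_tokens(
            {
                # special-tokens mapping is truncated in the diff, so it is omitted here
            }
        )
```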