Merge pull request #204 from tharapalanivel/fix_ordering_issue
🐛 Align inference params order
gkumbhat authored Sep 22, 2023
2 parents 617e4d9 + 57592f0 commit 39357e4
Showing 2 changed files with 4 additions and 4 deletions.
caikit_nlp/modules/text_generation/peft_tgis_remote.py (2 additions, 2 deletions)
@@ -179,7 +179,6 @@ def save(self, model_path: str):
     def run(
         self,
         text: str,
-        preserve_input_text: bool = False,
         max_new_tokens: Optional[int] = 20,
         min_new_tokens: Optional[int] = 0,
         truncate_input_tokens: Optional[int] = 0,
@@ -195,6 +194,7 @@ def run(
             Union[Tuple[int, float], ExponentialDecayLengthPenalty]
         ] = None,
         stop_sequences: Optional[List[str]] = None,
+        preserve_input_text: bool = False,
     ) -> GeneratedTextResult:
         """Run inference against the model running in TGIS.
@@ -235,7 +235,6 @@ def run(
     def run_stream_out(
         self,
         text: str,
-        preserve_input_text: bool = False,
         max_new_tokens: Optional[int] = 20,
         min_new_tokens: Optional[int] = 0,
         truncate_input_tokens: Optional[int] = 0,
@@ -251,6 +250,7 @@ def run_stream_out(
             Union[Tuple[int, float], ExponentialDecayLengthPenalty]
         ] = None,
         stop_sequences: Optional[List[str]] = None,
+        preserve_input_text: bool = False,
     ) -> Iterable[GeneratedTextStreamResult]:
         """Run output stream inferencing against the model running in TGIS
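
The net effect in peft_tgis_remote.py is that preserve_input_text moves from the second positional slot to the end of run()'s parameter list, so a caller that passed it positionally would now silently bind that value to max_new_tokens. A minimal usage sketch in Python (here `model` is a hypothetical handle to a loaded module from this file, not something defined in this commit):

    # Hypothetical sketch: keyword arguments stay correct across the reorder,
    # while a positional second argument would now land on max_new_tokens.
    result = model.run(
        "What is the boiling point of water?",
        max_new_tokens=20,
        stop_sequences=["\n"],
        preserve_input_text=False,  # now the final parameter in the signature
    )
    # Field name assumed from caikit's GeneratedTextResult data model.
    print(result.generated_text)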
caikit_nlp/modules/text_generation/text_generation_tgis.py (2 additions, 2 deletions)
@@ -205,7 +205,6 @@ def save(self, model_path: str):
     def run(
         self,
         text: str,
-        preserve_input_text: bool = False,
         max_new_tokens: Optional[int] = 20,
         min_new_tokens: Optional[int] = 0,
         truncate_input_tokens: Optional[int] = 0,
@@ -221,6 +220,7 @@ def run(
             Union[Tuple[int, float], ExponentialDecayLengthPenalty]
         ] = None,
         stop_sequences: Optional[List[str]] = None,
+        preserve_input_text: bool = False,
     ) -> GeneratedTextResult:
         """Run inference against the model running in TGIS.
@@ -256,7 +256,6 @@ def run(
     def run_stream_out(
         self,
         text: str,
-        preserve_input_text: bool = False,
         max_new_tokens: Optional[int] = 20,
         min_new_tokens: Optional[int] = 0,
         truncate_input_tokens: Optional[int] = 0,
@@ -272,6 +271,7 @@ def run_stream_out(
             Union[Tuple[int, float], ExponentialDecayLengthPenalty]
         ] = None,
         stop_sequences: Optional[List[str]] = None,
+        preserve_input_text: bool = False,
     ) -> Iterable[GeneratedTextStreamResult]:
         """Run output stream inferencing for text generation module.
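
The streaming path in text_generation_tgis.py gets the same reorder, and the same keyword-first pattern keeps callers safe; run_stream_out() returns an iterable of GeneratedTextStreamResult items. A hedged sketch (again, `model` and the field name on each streamed item are assumptions, not part of this commit):

    # Hypothetical streaming sketch: consume the iterable returned by
    # run_stream_out(), passing the moved parameter by keyword.
    for chunk in model.run_stream_out(
        "Tell me a short story.",
        max_new_tokens=64,
        preserve_input_text=False,
    ):
        # generated_text is assumed from caikit's streaming data model.
        print(chunk.generated_text, end="", flush=True)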
