diff --git a/caikit_nlp/modules/text_generation/peft_tgis_remote.py b/caikit_nlp/modules/text_generation/peft_tgis_remote.py
index 8b76aef7..fa21e34c 100644
--- a/caikit_nlp/modules/text_generation/peft_tgis_remote.py
+++ b/caikit_nlp/modules/text_generation/peft_tgis_remote.py
@@ -179,7 +179,6 @@ def save(self, model_path: str):
     def run(
         self,
         text: str,
-        preserve_input_text: bool = False,
         max_new_tokens: Optional[int] = 20,
         min_new_tokens: Optional[int] = 0,
         truncate_input_tokens: Optional[int] = 0,
@@ -195,6 +194,7 @@ def run(
             Union[Tuple[int, float], ExponentialDecayLengthPenalty]
         ] = None,
         stop_sequences: Optional[List[str]] = None,
+        preserve_input_text: bool = False,
     ) -> GeneratedTextResult:
         """Run inference against the model running in TGIS.

@@ -235,7 +235,6 @@ def run(
     def run_stream_out(
         self,
         text: str,
-        preserve_input_text: bool = False,
         max_new_tokens: Optional[int] = 20,
         min_new_tokens: Optional[int] = 0,
         truncate_input_tokens: Optional[int] = 0,
@@ -251,6 +250,7 @@ def run_stream_out(
             Union[Tuple[int, float], ExponentialDecayLengthPenalty]
         ] = None,
         stop_sequences: Optional[List[str]] = None,
+        preserve_input_text: bool = False,
     ) -> Iterable[GeneratedTextStreamResult]:
         """Run output stream inferencing against the model running in TGIS

diff --git a/caikit_nlp/modules/text_generation/text_generation_tgis.py b/caikit_nlp/modules/text_generation/text_generation_tgis.py
index e2130ed7..b537889b 100644
--- a/caikit_nlp/modules/text_generation/text_generation_tgis.py
+++ b/caikit_nlp/modules/text_generation/text_generation_tgis.py
@@ -205,7 +205,6 @@ def save(self, model_path: str):
     def run(
         self,
         text: str,
-        preserve_input_text: bool = False,
         max_new_tokens: Optional[int] = 20,
         min_new_tokens: Optional[int] = 0,
         truncate_input_tokens: Optional[int] = 0,
@@ -221,6 +220,7 @@ def run(
             Union[Tuple[int, float], ExponentialDecayLengthPenalty]
         ] = None,
         stop_sequences: Optional[List[str]] = None,
+        preserve_input_text: bool = False,
     ) -> GeneratedTextResult:
         """Run inference against the model running in TGIS.

@@ -256,7 +256,6 @@ def run(
     def run_stream_out(
         self,
         text: str,
-        preserve_input_text: bool = False,
         max_new_tokens: Optional[int] = 20,
         min_new_tokens: Optional[int] = 0,
         truncate_input_tokens: Optional[int] = 0,
@@ -272,6 +271,7 @@ def run_stream_out(
             Union[Tuple[int, float], ExponentialDecayLengthPenalty]
         ] = None,
         stop_sequences: Optional[List[str]] = None,
+        preserve_input_text: bool = False,
     ) -> Iterable[GeneratedTextStreamResult]:
         """Run output stream inferencing for text generation module.