Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

[MAINTENANCE] Most Recent Version of matplotlib breaks ptitprince and seaborn method calls. #4007

Merged
4 changes: 2 additions & 2 deletions ludwig/data/preprocessing.py
Original file line number Diff line number Diff line change
Expand Up @@ -1224,7 +1224,7 @@ def build_dataset(
else:
logger.warning(
f"Specified split column {global_preprocessing_parameters['split']['column']} for fixed "
f"split strategy was not found in dataset."
f"split strategy was not found in dataset." # noqa: E713
)

# update input features with prompt configs during preprocessing (as opposed to during the model forward pass)
Expand Down Expand Up @@ -1457,7 +1457,7 @@ def cast_columns(dataset_cols, features, backend) -> None:
)
except KeyError as e:
raise KeyError(
f"Feature name {e} specified in the config was not found in dataset with columns: "
f"Feature name {e} specified in the config was not found in dataset with columns: " # noqa: E713
+ f"{list(dataset_cols.keys())}"
)

Expand Down
6 changes: 6 additions & 0 deletions ludwig/encoders/image/base.py
Original file line number Diff line number Diff line change
Expand Up @@ -402,6 +402,12 @@ def __init__(
)
transformer = ViTModel(config)

if output_attentions:
config_dict: dict = transformer.config.to_dict()
updated_config: ViTConfig = ViTConfig(**config_dict)
updated_config._attn_implementation = "eager"
transformer = ViTModel(updated_config)

self.transformer = FreezeModule(transformer, frozen=not trainable)

self._output_shape = (transformer.config.hidden_size,)
Expand Down
2 changes: 1 addition & 1 deletion ludwig/models/base.py
Original file line number Diff line number Diff line change
Expand Up @@ -316,7 +316,7 @@ def collect_weights(self, tensor_names=None, **kwargs):
weight_names = {name for name, _ in self.named_parameters()}
for name in tensor_names:
if name not in weight_names:
raise ValueError(f'Requested tensor name filter "{name}" not present in the model graph')
raise ValueError(f'Requested tensor name filter "{name}" not present in the model graph') # noqa: E713

# Apply filter.
tensor_set = set(tensor_names)
Expand Down
2 changes: 2 additions & 0 deletions ludwig/models/llm.py
Original file line number Diff line number Diff line change
Expand Up @@ -563,6 +563,8 @@ def save(self, save_path):
# avoid this hack
if self.config_obj.trainer.type != "none":
weights_save_path = os.path.join(save_path, MODEL_WEIGHTS_FILE_NAME)
# We initialize the model's generation configuration; otherwise, we get a validation error.
self.model.generation_config = self.generation
Copy link
Contributor

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Fascinating

Copy link
Collaborator Author

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

@arnavgarg1 Yes, and they also changed the ViTModel (I filed an issue: huggingface/transformers#30978). Unless you recommend changes, may I please get an approval so that @ethanreidel can continue his work? Thanks!

self.model.save_pretrained(weights_save_path)
else:
logger.info("Skipped saving LLM without weight adjustments.")
Expand Down
2 changes: 1 addition & 1 deletion ludwig/utils/automl/utils.py
Original file line number Diff line number Diff line change
Expand Up @@ -31,7 +31,7 @@
def avg_num_tokens_decoder(x):
if x is None:
return None
if type(x) == bytes:
if type(x) is bytes:
return x.decode("utf-8")
return str(x)

Expand Down
4 changes: 2 additions & 2 deletions ludwig/utils/image_utils.py
Original file line number Diff line number Diff line change
Expand Up @@ -446,11 +446,11 @@ def to_np_tuple(prop: Union[int, Iterable]) -> np.ndarray:
height_stride = 2 and width_stride = 3. stride=2 gets converted into
np.array([2, 2]).
"""
if type(prop) == int:
if type(prop) is int:
return np.ones(2).astype(int) * prop
elif isinstance(prop, Iterable) and len(prop) == 2:
return np.array(list(prop)).astype(int)
elif type(prop) == np.ndarray and prop.size == 2:
elif type(prop) is np.ndarray and prop.size == 2:
return prop.astype(int)
else:
raise TypeError(f"prop must be int or iterable of length 2, but is {prop}.")
Expand Down
2 changes: 1 addition & 1 deletion ludwig/utils/server_utils.py
Original file line number Diff line number Diff line change
Expand Up @@ -134,7 +134,7 @@ def deserialize_request(form) -> tuple:
files = []
file_index = {}
for k, v in form.multi_items():
if type(v) == UploadFile:
if type(v) is UploadFile:
file_index[v.filename] = _write_file(v, files)

# reconstruct the dataframe
Expand Down
2 changes: 1 addition & 1 deletion ludwig/utils/visualization_utils.py
Original file line number Diff line number Diff line change
Expand Up @@ -1432,7 +1432,7 @@ def hyperopt_report(hyperparameters, hyperopt_results_df, metric, filename_templ
else:
# TODO: more research needed on how to handle RayTune "sample_from" search space
raise ValueError(
f"{hp_params[SPACE]} search space not supported in Ludwig. "
f"{hp_params[SPACE]} search space not supported in Ludwig. " # noqa: E713
f"Supported values are {RAY_TUNE_FLOAT_SPACES | RAY_TUNE_INT_SPACES | RAY_TUNE_CATEGORY_SPACES}."
)

Expand Down
2 changes: 1 addition & 1 deletion requirements_viz.txt
Original file line number Diff line number Diff line change
@@ -1,4 +1,4 @@
matplotlib>=3.4; python_version > '3.6'
matplotlib>3.4,<3.9.0; python_version > '3.6'
matplotlib>=3.0,<3.4; python_version <= '3.6'
seaborn>=0.7,<0.12
hiplot
Expand Down
16 changes: 16 additions & 0 deletions setup.cfg
Original file line number Diff line number Diff line change
Expand Up @@ -19,3 +19,19 @@ ignore =
W503
# Ignore "whitespace before ':'"
E203
# Ignore "missing whitespace after ':'"
E231
# Ignore "multiple spaces after comma"
E241
# Ignore "multiple spaces before operator"
E221
# Ignore "missing whitespace around operator"
E225
# Ignore "missing whitespace around arithmetic operator"
E226
# Ignore "multiple spaces after comma" (duplicate of the E241 entry above)
E241
# Ignore "multiple spaces after keyword"
E271
# Ignore "missing whitespace after keyword"
E275
Loading