Commit
Showing 7 changed files with 146 additions and 27 deletions.
Empty file.
@@ -0,0 +1 @@
__version__ = "0.34.1-dev0"
Empty file.
@@ -0,0 +1,106 @@
import logging
import os
import typing
import urllib.parse
from typing import Any, Dict, List, Union

import filelock
import numpy as np
import requests
import torch

def expand_like(arrays: List[np.ndarray], fill: float = -100) -> np.ndarray:
    """
    Stacks a list of arrays along the first dimension; the arrays are allowed to differ in
    the second dimension but must match in all dimensions beyond the second.
    The output will have shape
    (sum([a.shape[0] for a in arrays]), max([a.shape[1] for a in arrays]), ...)
    For arrays that have fewer entries in the second dimension than the max, we
    pad with the fill value.
    Args:
        arrays: List of np.ndarray to stack along the first dimension
        fill: Value to fill in when padding to max size in the second dimension
    Returns:
        stacked array
    """
    full_shape = list(arrays[0].shape)
    # 1-D arrays have no second dimension to pad, so plain concatenation suffices.
    if len(full_shape) == 1:
        return np.concatenate(arrays)
    full_shape[0] = sum(a.shape[0] for a in arrays)
    full_shape[1] = max(a.shape[1] for a in arrays)
    result = np.full(full_shape, fill)
    row_offset = 0
    for a in arrays:
        result[row_offset : row_offset + a.shape[0], : a.shape[1]] = a
        row_offset += a.shape[0]
    return result
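
A minimal sketch of the padding behavior (not part of the commit; the input shapes and fill value are arbitrary, and expand_like is assumed importable from this module):

import numpy as np

# Two hypothetical 2-D arrays that differ in their second dimension.
a = np.ones((2, 3))
b = np.zeros((1, 5))

stacked = expand_like([a, b], fill=-100.0)
print(stacked.shape)  # (3, 5): 2 + 1 rows, padded to the widest array
print(stacked[0, 3])  # -100.0: positions past a's width hold the fill value
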
def numpify(x: Union[List, np.ndarray, torch.Tensor]) -> np.ndarray:
    """
    Converts List or torch.Tensor to numpy.ndarray.
    """
    if isinstance(x, np.ndarray):
        return x
    if isinstance(x, List):
        return np.array(x)
    if isinstance(x, torch.Tensor):
        return x.cpu().numpy()  # type: ignore
    raise TypeError("Expected input of type List, np.ndarray, or torch.Tensor.")
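
An illustrative check of the three accepted input types (a sketch, not part of the commit; numpify is assumed in scope):

import numpy as np
import torch

# Each call should yield an np.ndarray, whatever the input container.
for x in ([1, 2, 3], np.arange(3), torch.tensor([1.0, 2.0])):
    assert isinstance(numpify(x), np.ndarray)
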
def download_url(download_directory: str, url: str) -> str:
    url_path = urllib.parse.urlparse(url).path
    basename = url_path.rsplit("/", 1)[1]

    os.makedirs(download_directory, exist_ok=True)
    filepath = os.path.join(download_directory, basename)
    lock = filelock.FileLock(filepath + ".lock")

    # The file lock ensures only one process performs the download;
    # the others block until the file exists and then reuse it.
    with lock:
        if not os.path.exists(filepath):
            logging.info("Downloading {} to {}".format(url, filepath))

            r = requests.get(url, stream=True)
            with open(filepath, "wb") as f:
                for chunk in r.iter_content(chunk_size=8192):
                    if chunk:
                        f.write(chunk)
    return filepath
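
Hypothetical usage (the URL and directory are placeholders): the returned path is derived from the last component of the URL's path.

path = download_url("/tmp/downloads", "https://example.com/data/archive.zip")
print(path)  # /tmp/downloads/archive.zip

Because the lock is keyed to the target path, concurrent processes on the same machine download each file at most once.
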
def compute_num_training_steps(experiment_config: Dict, global_batch_size: int) -> int:
    max_length_unit = list(experiment_config["searcher"]["max_length"].keys())[0]
    max_length: int = experiment_config["searcher"]["max_length"][max_length_unit]
    if max_length_unit == "batches":
        return max_length
    if max_length_unit == "epochs":
        if "records_per_epoch" in experiment_config:
            return max_length * int(experiment_config["records_per_epoch"] / global_batch_size)
        raise Exception(
            "Missing records_per_epoch in the experiment configuration, which is "
            "needed to compute the number of training steps for the learning rate scheduler."
        )
    # Otherwise, max_length_unit == "records".
    return int(max_length / global_batch_size)
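
For instance, a sketch with a hypothetical experiment configuration expressed in epochs (the field values are made up):

config = {
    "searcher": {"max_length": {"epochs": 3}},
    "records_per_epoch": 64000,
}
# 3 epochs * (64000 records / batch size 32) = 6000 steps.
print(compute_num_training_steps(config, global_batch_size=32))  # 6000
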
class AttrDict(dict):
    def __init__(self, *args: Any, **kwargs: Any) -> None:
        super().__init__(*args, **kwargs)
        # Aliasing __dict__ to the dict itself makes keys readable and
        # writable as attributes; nested dicts are wrapped recursively.
        self.__dict__ = self
        for key in self.keys():
            if isinstance(self[key], dict):
                self[key] = AttrDict(self[key])

    if typing.TYPE_CHECKING:
        # These stubs exist only for the type checker, so that arbitrary
        # attribute access and assignment pass under mypy.

        def __getattr__(self, item: Any) -> Any:
            return True

        def __setattr__(self, item: Any, value: Any) -> None:
            return None
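
A brief illustration of the attribute-style access (hypothetical values):

cfg = AttrDict({"optimizer": {"lr": 0.01}, "epochs": 3})
print(cfg.optimizer.lr)  # 0.01: nested dicts are wrapped as AttrDicts
cfg.epochs = 5           # attribute writes update the underlying dict
print(cfg["epochs"])     # 5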