Mirror of https://github.com/xai-org/grok-1.git (synced 2024-11-23 12:09:52 +03:00)
get_load_path_str() -> get_load_path_str_cached(): Optimizing Regex Operations with Caching
commit 5fc82399bf
parent 3fd4e7c4d7
@@ -26,6 +26,10 @@ import tempfile
 from concurrent.futures import ThreadPoolExecutor, wait
 from typing import Any, Optional
 
+# For get_load_path_str
+# A simple caching mechanism to avoid recomputing regex matches for paths that have already been processed.
+from functools import lru_cache
+
 import jax
 import numpy as np
 from jax.experimental import multihost_utils
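The new import is functools.lru_cache, the standard-library memoization decorator this commit builds on. As a minimal sketch of the behavior being relied on (illustrative names only, not code from this repository):

import re
from functools import lru_cache

@lru_cache(maxsize=None)  # unbounded: every distinct argument stays cached
def matches_layer(path: str) -> bool:
    # The regex search runs only on a cache miss; a repeat call with the
    # same path returns the stored result without re-running the regex.
    return re.search(r"layer_\d+", path) is not None

matches_layer("transformer/layer_7/ffn")  # computed once
matches_layer("transformer/layer_7/ffn")  # served from the cache

With maxsize=None the cache never evicts, which suits a checkpoint-restore pass where the set of parameter paths is finite.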
@@ -133,6 +137,22 @@ def path_tuple_to_string(path: tuple) -> str:
     return "/".join(pieces)
 
 
+"""
+For get_load_path_str(),
+introducing a simple caching mechanism to avoid recomputing regex matches for paths that have already been processed.
+"""
+@lru_cache(maxsize=None)
+def get_load_path_str_cached(
+    init_path_str: str,
+    load_rename_rules: Optional[list[tuple[str, str]]] = None,
+    load_exclude_rules: Optional[list[str]] = None,
+) -> Optional[str]:
+    return get_load_path_str(
+        init_path_str,
+        load_rename_rules,
+        load_exclude_rules
+    )
+
 def get_load_path_str(
     init_path_str: str,
     load_rename_rules: Optional[list[tuple[str, str]]] = None,
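One caveat worth noting: lru_cache hashes every argument, and the declared parameter types list[tuple[str, str]] and list[str] are unhashable, so passing actual lists to get_load_path_str_cached raises TypeError: unhashable type: 'list'. A hypothetical call-site workaround, not part of this commit, is to freeze the rules once before the loop:

# Hypothetical call-site guard, not part of this commit: lru_cache hashes
# its arguments, so list-valued rules must be frozen into tuples first
# (assuming the rename rules are (pattern, replacement) string pairs,
# per the type hints above).
rename_rules = tuple(load_rename_rules) if load_rename_rules else None
exclude_rules = tuple(load_exclude_rules) if load_exclude_rules else None
load_path_str = get_load_path_str_cached(init_path_str, rename_rules, exclude_rules)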
@@ -171,7 +191,7 @@ def replace_with_load_state(
     data_model_shards = math.prod(mesh_config)
     for i, (init_path, tensor) in enumerate(flatten_init):
         init_path_str = path_tuple_to_string(init_path)
-        load_path_str = get_load_path_str(init_path_str, load_rename_rules, load_exclude_rules)
+        load_path_str = get_load_path_str_cached(init_path_str, load_rename_rules, load_exclude_rules)
         if load_path_str is None:
             rank_logger.info(f"Excluded from restore: {init_path_str}.")
             replaced.append(tensor)
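Since the wrapper is a plain lru_cache, the decorator's standard introspection hooks come along for free; for example:

# Standard lru_cache instrumentation on the new wrapper: report hit/miss
# counts after a restore pass, and clear the cache if the rename/exclude
# rules change between loads.
print(get_load_path_str_cached.cache_info())
get_load_path_str_cached.cache_clear()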