Skip to content

Commit

Permalink
Fix error raised when the cache directory does not exist
Browse files Browse the repository at this point in the history
  • Loading branch information
zhaochaoxing committed Jan 19, 2024
1 parent 305f70f commit 101ef1f
Show file tree
Hide file tree
Showing 2 changed files with 4 additions and 6 deletions.
2 changes: 2 additions & 0 deletions dicp/dicp/dynamo_bridge/utils.py
Original file line number Diff line number Diff line change
@@ -1,4 +1,5 @@
import copy
from pathlib import Path
from typing import Any, Dict, Tuple

import torch.fx
Expand All @@ -7,6 +8,7 @@


def save_cpu_gm(gm: torch.fx.GraphModule, folder: str):
Path(folder).mkdir(exist_ok=True)
cpu_gm = copy_gm_to_cpu(gm)
grap_code = cpu_gm.code
graph_key = code_hash(grap_code)
Expand Down
8 changes: 2 additions & 6 deletions dicp/dicp/vendor/AscendGraph/compile_job.py
Original file line number Diff line number Diff line change
@@ -1,11 +1,9 @@
import os
import subprocess
import time
import base64
import hashlib

from dicp.dynamo_bridge.compile import DeviceCompileJob
from torch._inductor.codecache import pick_vec_isa, cpp_compile_command, write
from torch._inductor.codecache import pick_vec_isa, cpp_compile_command, write, code_hash
from torch._inductor import exc


Expand All @@ -20,15 +18,13 @@ def __init__(self, source_code) -> None:
for file in [source_path, source_include]:
with open(file, 'r') as f:
compile_file_code += f.read()
code_sha256 = hashlib.sha256(compile_file_code.encode("utf-8")).digest()
code_hash = base64.b32encode(code_sha256)[:51].decode("utf-8").lower()
picked_vec_isa = pick_vec_isa()
self._local_rank = int(os.environ.get("LOCAL_RANK", 0))
self._key, self._input_path = write(
source_code.strip(),
"json",
extra=cpp_compile_command("i", "o", vec_isa=picked_vec_isa) +
'local_rank' + str(self._local_rank) + code_hash
'local_rank' + str(self._local_rank) + code_hash(compile_file_code)
)
self._output_graph_path = self._input_path[:-5] + '/graph'
print('output_path: ', self._output_graph_path)
Expand Down

0 comments on commit 101ef1f

Please sign in to comment.