Commit a0e27de

Allow any cache hit/miss/bypass to have emojis, not just fx_graph
jamesjwu committed Jan 27, 2025
1 parent 090f5e4 commit a0e27de
Showing 2 changed files with 7 additions and 7 deletions.
6 changes: 3 additions & 3 deletions src/lib.rs
@@ -89,11 +89,11 @@ fn run_parser<'t>(
         if let Some(md) = parser.get_metadata(&e) {
             let results = parser.parse(lineno, md, e.rank, &e.compile_id, &payload);
             fn extract_suffix(filename: &String) -> String {
-                if filename.contains("fx_graph_cache_miss") {
+                if filename.contains("cache_miss") {
                     "❌".to_string()
-                } else if filename.contains("fx_graph_cache_hit") {
+                } else if filename.contains("cache_hit") {
                     "✅".to_string()
-                } else if filename.contains("fx_graph_cache_bypass") {
+                } else if filename.contains("cache_bypass") {
                     "❓".to_string()
                 } else {
                     "".to_string()
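
The effect of the relaxed match, sketched in isolation below. This is a simplified standalone version of extract_suffix (taking &str instead of &String, and lifted out of its run_parser context); the assertions are illustrative, showing which artifact names gain a suffix after this change:

    // Any artifact name containing cache_miss/cache_hit/cache_bypass now gets
    // a suffix, not only the fx_graph_cache_* variants.
    fn extract_suffix(filename: &str) -> String {
        if filename.contains("cache_miss") {
            "❌".to_string()
        } else if filename.contains("cache_hit") {
            "✅".to_string()
        } else if filename.contains("cache_bypass") {
            "❓".to_string()
        } else {
            "".to_string()
        }
    }

    fn main() {
        // Covered both before and after the change:
        assert_eq!(extract_suffix("fx_graph_cache_hit"), "✅");
        // Newly covered by the broader substring check:
        assert_eq!(extract_suffix("aotautograd_cache_bypass"), "❓");
        assert_eq!(extract_suffix("aotautograd_cache_miss"), "❌");
        // Names without a hit/miss/bypass marker still get no suffix:
        assert_eq!(extract_suffix("aotautograd_cache_hash"), "");
    }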
8 changes: 4 additions & 4 deletions tests/inputs/cache_hit_miss.log
@@ -2370,7 +2370,7 @@ V1206 15:24:50.260000 1667746 torch/_dynamo/utils.py:1327] {"chromium_event": {}
 "pid": 0,
 "s": "p"
 }
-V1206 15:24:50.260000 1667746 torch/_functorch/_aot_autograd/autograd_cache.py:763] {"artifact": {"name": "aotautograd_cache_hash", "encoding": "json"}, "compiled_autograd_id": null, "frame_id": 1, "frame_compile_id": 0, "attempt": 0, "has_payload": "5601d02186053adcc1ba29fd248c1d20"}
+V1206 15:24:50.260000 1667746 torch/_functorch/_aot_autograd/autograd_cache.py:763] {"artifact": {"name": "aotautograd_cache_bypass", "encoding": "json"}, "compiled_autograd_id": null, "frame_id": 1, "frame_compile_id": 0, "attempt": 0, "has_payload": "5601d02186053adcc1ba29fd248c1d20"}
 {"cache_bypass_reason": "Unsupported call_function target flex_attention. \n Function module: torch.ops.higher_order, \nFunction name: flex_attention", "cache_bypass_hard_exception": false, "key": null, "cache_state": "bypass", "components": [], "compile_id": "1/0"}
 V1206 15:24:50.260000 1667746 torch/_dynamo/utils.py:1288] {"chromium_event": {}, "compiled_autograd_id": null, "frame_id": 1, "frame_compile_id": 0, "attempt": 0, "has_payload": "1b940c0ffee7b576f294d159fb3b7a84"}
 {
@@ -4637,7 +4637,7 @@ V1206 15:24:50.603000 1667746 torch/_dynamo/utils.py:1327] {"chromium_event": {}
 "pid": 0,
 "s": "p"
 }
-V1206 15:24:50.603000 1667746 torch/_functorch/_aot_autograd/autograd_cache.py:763] {"artifact": {"name": "aotautograd_cache_hash", "encoding": "json"}, "compiled_autograd_id": null, "frame_id": 1, "frame_compile_id": 0, "attempt": 0, "has_payload": "5601d02186053adcc1ba29fd248c1d20"}
+V1206 15:24:50.603000 1667746 torch/_functorch/_aot_autograd/autograd_cache.py:763] {"artifact": {"name": "aotautograd_cache_bypass", "encoding": "json"}, "compiled_autograd_id": null, "frame_id": 1, "frame_compile_id": 0, "attempt": 0, "has_payload": "5601d02186053adcc1ba29fd248c1d20"}
 {"cache_bypass_reason": "Unsupported call_function target flex_attention. \n Function module: torch.ops.higher_order, \nFunction name: flex_attention", "cache_bypass_hard_exception": false, "key": null, "cache_state": "bypass", "components": [], "compile_id": "1/0"}
 V1206 15:24:50.603000 1667746 torch/_dynamo/utils.py:1288] {"chromium_event": {}, "compiled_autograd_id": null, "frame_id": 1, "frame_compile_id": 0, "attempt": 0, "has_payload": "bd634b54ab5138da3c38e2434aae7337"}
 {
@@ -7249,7 +7249,7 @@ V1206 15:24:54.138000 1667746 torch/_dynamo/utils.py:1327] {"chromium_event": {}
 "pid": 0,
 "s": "p"
 }
-V1206 15:24:54.138000 1667746 torch/_functorch/_aot_autograd/autograd_cache.py:763] {"artifact": {"name": "aotautograd_cache_hash", "encoding": "json"}, "compiled_autograd_id": null, "frame_id": 1, "frame_compile_id": 0, "attempt": 0, "has_payload": "5601d02186053adcc1ba29fd248c1d20"}
+V1206 15:24:54.138000 1667746 torch/_functorch/_aot_autograd/autograd_cache.py:763] {"artifact": {"name": "aotautograd_cache_bypass", "encoding": "json"}, "compiled_autograd_id": null, "frame_id": 1, "frame_compile_id": 0, "attempt": 0, "has_payload": "5601d02186053adcc1ba29fd248c1d20"}
 {"cache_bypass_reason": "Unsupported call_function target flex_attention. \n Function module: torch.ops.higher_order, \nFunction name: flex_attention", "cache_bypass_hard_exception": false, "key": null, "cache_state": "bypass", "components": [], "compile_id": "1/0"}
 V1206 15:24:54.139000 1667746 torch/_dynamo/utils.py:1288] {"chromium_event": {}, "compiled_autograd_id": null, "frame_id": 1, "frame_compile_id": 0, "attempt": 0, "has_payload": "cca36f8b4f9f5ea8ad866b857a634eb8"}
 {
@@ -7538,4 +7538,4 @@ V1206 15:24:54.156000 1667746 torch/_dynamo/utils.py:1288] {"chromium_event": {}
 "cat": "dynamo_timed",
 "tid": 0,
 "pid": 0
-}
+}
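
The fixture rename from aotautograd_cache_hash to aotautograd_cache_bypass gives the test log an artifact name that actually carries a cache_bypass marker (the hash variant matches none of the substrings), so the fixture can exercise the new non-fx_graph path. A sketch of the end-to-end effect on one such metadata line, assuming serde_json as a dependency; the real parser plumbing differs:

    use serde_json::Value;

    fn suffix_for(name: &str) -> &'static str {
        if name.contains("cache_miss") {
            "❌"
        } else if name.contains("cache_hit") {
            "✅"
        } else if name.contains("cache_bypass") {
            "❓"
        } else {
            ""
        }
    }

    fn main() {
        // An artifact header shaped like the fixture lines above.
        let metadata = r#"{"artifact": {"name": "aotautograd_cache_bypass", "encoding": "json"}}"#;
        let v: Value = serde_json::from_str(metadata).expect("valid JSON");
        if let Some(name) = v["artifact"]["name"].as_str() {
            // Prints: aotautograd_cache_bypass ❓
            println!("{} {}", name, suffix_for(name));
        }
    }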
