dacorvo (HF Staff) committed
Commit ce6c70a · verified · 1 Parent(s): f7dd6a1

Synchronizing local compiler cache.

This view is limited to 50 files because it contains too many changes. See raw diff.
Files changed (50)
  1. .gitattributes +27 -0
  2. neuronxcc-2.21.33363.0+82129205/0_REGISTRY/0.4.3.dev3/granite/hf-internal-testing/tiny-random-GraniteForCausalLM/822b61c3a021502bf290.json +59 -0
  3. neuronxcc-2.21.33363.0+82129205/0_REGISTRY/0.4.3.dev3/llama/llamafactory/tiny-random-Llama-3/9587f75fe03d32199424.json +63 -0
  4. neuronxcc-2.21.33363.0+82129205/0_REGISTRY/0.4.3.dev3/llama4_text/tiny-random/llama-4/663adc121ad6f0bf6c41.json +82 -0
  5. neuronxcc-2.21.33363.0+82129205/0_REGISTRY/0.4.3.dev3/mixtral/dacorvo/Mixtral-tiny/2115b9307dea032bae46.json +58 -0
  6. neuronxcc-2.21.33363.0+82129205/0_REGISTRY/0.4.3.dev3/mixtral/dacorvo/Mixtral-tiny/6c78d4e7067fedb8c6d8.json +59 -0
  7. neuronxcc-2.21.33363.0+82129205/0_REGISTRY/0.4.3.dev3/phi3/yujiepan/phi-4-tiny-random/9830c1de6c2e8faed0c8.json +60 -0
  8. neuronxcc-2.21.33363.0+82129205/0_REGISTRY/0.4.3.dev3/qwen2/yujiepan/qwen2.5-128k-tiny-random/d2ffa119e8a8f246f65e.json +65 -0
  9. neuronxcc-2.21.33363.0+82129205/0_REGISTRY/0.4.3.dev3/qwen3_moe/optimum-internal-testing/tiny-random-qwen3_moe/35684b74302809c36482.json +66 -0
  10. neuronxcc-2.21.33363.0+82129205/0_REGISTRY/0.4.3.dev3/smollm3/HuggingFaceTB/SmolLM3-3B/9a47a3a9f135594c847d.json +135 -0
  11. neuronxcc-2.21.33363.0+82129205/MODULE_01328268a19023ed0f24+f8e6d902/compile_flags.json +1 -0
  12. neuronxcc-2.21.33363.0+82129205/MODULE_01328268a19023ed0f24+f8e6d902/model.done +0 -0
  13. neuronxcc-2.21.33363.0+82129205/MODULE_01328268a19023ed0f24+f8e6d902/model.hlo_module.pb +3 -0
  14. neuronxcc-2.21.33363.0+82129205/MODULE_01328268a19023ed0f24+f8e6d902/model.neff +3 -0
  15. neuronxcc-2.21.33363.0+82129205/MODULE_137f98143cdeb07dab7d+a02c3a36/compile_flags.json +1 -0
  16. neuronxcc-2.21.33363.0+82129205/MODULE_137f98143cdeb07dab7d+a02c3a36/model.done +0 -0
  17. neuronxcc-2.21.33363.0+82129205/MODULE_137f98143cdeb07dab7d+a02c3a36/model.hlo_module.pb +3 -0
  18. neuronxcc-2.21.33363.0+82129205/MODULE_137f98143cdeb07dab7d+a02c3a36/model.neff +3 -0
  19. neuronxcc-2.21.33363.0+82129205/MODULE_137f98143cdeb07dab7d+a02c3a36/wrapped_neff.hlo +3 -0
  20. neuronxcc-2.21.33363.0+82129205/MODULE_1d4bedf0c37a53b47737+a02c3a36/compile_flags.json +1 -0
  21. neuronxcc-2.21.33363.0+82129205/MODULE_1d4bedf0c37a53b47737+a02c3a36/model.done +0 -0
  22. neuronxcc-2.21.33363.0+82129205/MODULE_1d4bedf0c37a53b47737+a02c3a36/model.hlo_module.pb +3 -0
  23. neuronxcc-2.21.33363.0+82129205/MODULE_1d4bedf0c37a53b47737+a02c3a36/model.neff +3 -0
  24. neuronxcc-2.21.33363.0+82129205/MODULE_1d4bedf0c37a53b47737+a02c3a36/wrapped_neff.hlo +3 -0
  25. neuronxcc-2.21.33363.0+82129205/MODULE_2672b897bf6327bea9e4+24129607/compile_flags.json +1 -0
  26. neuronxcc-2.21.33363.0+82129205/MODULE_2672b897bf6327bea9e4+24129607/model.done +0 -0
  27. neuronxcc-2.21.33363.0+82129205/MODULE_2672b897bf6327bea9e4+24129607/model.hlo_module.pb +3 -0
  28. neuronxcc-2.21.33363.0+82129205/MODULE_2672b897bf6327bea9e4+24129607/model.neff +3 -0
  29. neuronxcc-2.21.33363.0+82129205/MODULE_31e31b6f47afb89b553b+bafdbdde/compile_flags.json +1 -0
  30. neuronxcc-2.21.33363.0+82129205/MODULE_31e31b6f47afb89b553b+bafdbdde/model.done +0 -0
  31. neuronxcc-2.21.33363.0+82129205/MODULE_31e31b6f47afb89b553b+bafdbdde/model.hlo_module.pb +3 -0
  32. neuronxcc-2.21.33363.0+82129205/MODULE_31e31b6f47afb89b553b+bafdbdde/model.neff +3 -0
  33. neuronxcc-2.21.33363.0+82129205/MODULE_31e31b6f47afb89b553b+bafdbdde/wrapped_neff.hlo +3 -0
  34. neuronxcc-2.21.33363.0+82129205/MODULE_31e31b6f47afb89b553b+da15b874/compile_flags.json +1 -0
  35. neuronxcc-2.21.33363.0+82129205/MODULE_31e31b6f47afb89b553b+da15b874/model.hlo_module.pb +3 -0
  36. neuronxcc-2.21.33363.0+82129205/MODULE_31e31b6f47afb89b553b+da15b874/model.log +5 -0
  37. neuronxcc-2.21.33363.0+82129205/MODULE_4247663625e2eaeb4fab+a02c3a36/compile_flags.json +1 -0
  38. neuronxcc-2.21.33363.0+82129205/MODULE_4247663625e2eaeb4fab+a02c3a36/model.done +0 -0
  39. neuronxcc-2.21.33363.0+82129205/MODULE_4247663625e2eaeb4fab+a02c3a36/model.hlo_module.pb +3 -0
  40. neuronxcc-2.21.33363.0+82129205/MODULE_4247663625e2eaeb4fab+a02c3a36/model.neff +3 -0
  41. neuronxcc-2.21.33363.0+82129205/MODULE_4247663625e2eaeb4fab+a02c3a36/wrapped_neff.hlo +3 -0
  42. neuronxcc-2.21.33363.0+82129205/MODULE_5960bdedd7549bfacf7a+24129607/compile_flags.json +1 -0
  43. neuronxcc-2.21.33363.0+82129205/MODULE_5960bdedd7549bfacf7a+24129607/model.done +0 -0
  44. neuronxcc-2.21.33363.0+82129205/MODULE_5960bdedd7549bfacf7a+24129607/model.hlo_module.pb +3 -0
  45. neuronxcc-2.21.33363.0+82129205/MODULE_5960bdedd7549bfacf7a+24129607/model.neff +3 -0
  46. neuronxcc-2.21.33363.0+82129205/MODULE_73021bd2bcd85d0ae281+bafdbdde/compile_flags.json +1 -0
  47. neuronxcc-2.21.33363.0+82129205/MODULE_73021bd2bcd85d0ae281+bafdbdde/model.done +0 -0
  48. neuronxcc-2.21.33363.0+82129205/MODULE_73021bd2bcd85d0ae281+bafdbdde/model.hlo_module.pb +3 -0
  49. neuronxcc-2.21.33363.0+82129205/MODULE_73021bd2bcd85d0ae281+bafdbdde/model.neff +3 -0
  50. neuronxcc-2.21.33363.0+82129205/MODULE_73021bd2bcd85d0ae281+bafdbdde/wrapped_neff.hlo +3 -0
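
The paths above follow the optimum-neuron compiler cache layout: one registry entry per exported model under 0_REGISTRY/<optimum-neuron version>/<model type>/<model id>/<hash>.json, and one MODULE_<hash> directory per compiled graph holding compile_flags.json, the HLO module, and the compiled NEFF. A minimal sketch of how such a registry path can be composed; the helper below is hypothetical, not part of optimum-neuron's public API:

from pathlib import Path

def registry_entry_path(cache_root: Path, neuronxcc_version: str, optimum_neuron_version: str,
                        model_type: str, model_id: str, entry_hash: str) -> Path:
    # Mirrors the layout visible in the file list:
    # neuronxcc-<compiler>/0_REGISTRY/<optimum-neuron>/<model_type>/<model_id>/<hash>.json
    return (cache_root / f"neuronxcc-{neuronxcc_version}" / "0_REGISTRY"
            / optimum_neuron_version / model_type / model_id / f"{entry_hash}.json")

print(registry_entry_path(Path("."), "2.21.33363.0+82129205", "0.4.3.dev3",
                          "granite", "hf-internal-testing/tiny-random-GraniteForCausalLM",
                          "822b61c3a021502bf290"))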
.gitattributes CHANGED
@@ -5739,3 +5739,30 @@ neuronxcc-2.21.33363.0+82129205/MODULE_fd44e24d672157f0bc14+a02c3a36/model.neff
 neuronxcc-2.21.33363.0+82129205/MODULE_fd44e24d672157f0bc14+a02c3a36/wrapped_neff.hlo filter=lfs diff=lfs merge=lfs -text
 neuronxcc-2.21.33363.0+82129205/MODULE_ecafd5c328f53085c3ac+a02c3a36/model.neff filter=lfs diff=lfs merge=lfs -text
 neuronxcc-2.21.33363.0+82129205/MODULE_ecafd5c328f53085c3ac+a02c3a36/wrapped_neff.hlo filter=lfs diff=lfs merge=lfs -text
+ neuronxcc-2.21.33363.0+82129205/MODULE_01328268a19023ed0f24+f8e6d902/model.neff filter=lfs diff=lfs merge=lfs -text
+ neuronxcc-2.21.33363.0+82129205/MODULE_137f98143cdeb07dab7d+a02c3a36/model.neff filter=lfs diff=lfs merge=lfs -text
+ neuronxcc-2.21.33363.0+82129205/MODULE_137f98143cdeb07dab7d+a02c3a36/wrapped_neff.hlo filter=lfs diff=lfs merge=lfs -text
+ neuronxcc-2.21.33363.0+82129205/MODULE_1d4bedf0c37a53b47737+a02c3a36/model.neff filter=lfs diff=lfs merge=lfs -text
+ neuronxcc-2.21.33363.0+82129205/MODULE_1d4bedf0c37a53b47737+a02c3a36/wrapped_neff.hlo filter=lfs diff=lfs merge=lfs -text
+ neuronxcc-2.21.33363.0+82129205/MODULE_2672b897bf6327bea9e4+24129607/model.neff filter=lfs diff=lfs merge=lfs -text
+ neuronxcc-2.21.33363.0+82129205/MODULE_31e31b6f47afb89b553b+bafdbdde/model.neff filter=lfs diff=lfs merge=lfs -text
+ neuronxcc-2.21.33363.0+82129205/MODULE_31e31b6f47afb89b553b+bafdbdde/wrapped_neff.hlo filter=lfs diff=lfs merge=lfs -text
+ neuronxcc-2.21.33363.0+82129205/MODULE_4247663625e2eaeb4fab+a02c3a36/model.neff filter=lfs diff=lfs merge=lfs -text
+ neuronxcc-2.21.33363.0+82129205/MODULE_4247663625e2eaeb4fab+a02c3a36/wrapped_neff.hlo filter=lfs diff=lfs merge=lfs -text
+ neuronxcc-2.21.33363.0+82129205/MODULE_5960bdedd7549bfacf7a+24129607/model.neff filter=lfs diff=lfs merge=lfs -text
+ neuronxcc-2.21.33363.0+82129205/MODULE_73021bd2bcd85d0ae281+bafdbdde/model.neff filter=lfs diff=lfs merge=lfs -text
+ neuronxcc-2.21.33363.0+82129205/MODULE_73021bd2bcd85d0ae281+bafdbdde/wrapped_neff.hlo filter=lfs diff=lfs merge=lfs -text
+ neuronxcc-2.21.33363.0+82129205/MODULE_8f63e2da128b2efd447e+a02c3a36/model.neff filter=lfs diff=lfs merge=lfs -text
+ neuronxcc-2.21.33363.0+82129205/MODULE_8f63e2da128b2efd447e+a02c3a36/wrapped_neff.hlo filter=lfs diff=lfs merge=lfs -text
+ neuronxcc-2.21.33363.0+82129205/MODULE_9bc06d024b9ce47e5f56+24129607/model.neff filter=lfs diff=lfs merge=lfs -text
+ neuronxcc-2.21.33363.0+82129205/MODULE_9d52760610a001af812a+f8e6d902/model.neff filter=lfs diff=lfs merge=lfs -text
+ neuronxcc-2.21.33363.0+82129205/MODULE_ad7c8ac474ec4bdeae52+24129607/model.neff filter=lfs diff=lfs merge=lfs -text
+ neuronxcc-2.21.33363.0+82129205/MODULE_b0d91b1607c7df83fc7f+a02c3a36/model.neff filter=lfs diff=lfs merge=lfs -text
+ neuronxcc-2.21.33363.0+82129205/MODULE_b0d91b1607c7df83fc7f+a02c3a36/wrapped_neff.hlo filter=lfs diff=lfs merge=lfs -text
+ neuronxcc-2.21.33363.0+82129205/MODULE_c26856b929d58ebddc23+24129607/model.neff filter=lfs diff=lfs merge=lfs -text
+ neuronxcc-2.21.33363.0+82129205/MODULE_cb6b880fc977b7ae4bda+24129607/model.neff filter=lfs diff=lfs merge=lfs -text
+ neuronxcc-2.21.33363.0+82129205/MODULE_cbda8a59038c9c5ffc17+a02c3a36/model.neff filter=lfs diff=lfs merge=lfs -text
+ neuronxcc-2.21.33363.0+82129205/MODULE_cbda8a59038c9c5ffc17+a02c3a36/wrapped_neff.hlo filter=lfs diff=lfs merge=lfs -text
+ neuronxcc-2.21.33363.0+82129205/MODULE_e899b698a237c528bb16+24129607/model.neff filter=lfs diff=lfs merge=lfs -text
+ neuronxcc-2.21.33363.0+82129205/MODULE_fcaa5220788f46dccc4f+a02c3a36/model.neff filter=lfs diff=lfs merge=lfs -text
+ neuronxcc-2.21.33363.0+82129205/MODULE_fcaa5220788f46dccc4f+a02c3a36/wrapped_neff.hlo filter=lfs diff=lfs merge=lfs -text
neuronxcc-2.21.33363.0+82129205/0_REGISTRY/0.4.3.dev3/granite/hf-internal-testing/tiny-random-GraniteForCausalLM/822b61c3a021502bf290.json ADDED
@@ -0,0 +1,59 @@
+ {
+ "_entry_class": "SingleModelCacheEntry",
+ "_model_id": "hf-internal-testing/tiny-random-GraniteForCausalLM",
+ "_task": "text-generation",
+ "architectures": [
+ "GraniteForCausalLM"
+ ],
+ "attention_bias": false,
+ "attention_dropout": 0.0,
+ "attention_multiplier": 1.0,
+ "dtype": "float32",
+ "embedding_multiplier": 1.0,
+ "hidden_act": "silu",
+ "hidden_size": 32,
+ "initializer_range": 0.02,
+ "intermediate_size": 64,
+ "logits_scaling": 1.0,
+ "max_position_embeddings": 2048,
+ "mlp_bias": false,
+ "model_type": "granite",
+ "neuron": {
+ "_serialized_key": "NxDNeuronConfig",
+ "batch_size": 1,
+ "capacity_factor": null,
+ "checkpoint_id": "hf-internal-testing/tiny-random-GraniteForCausalLM",
+ "checkpoint_revision": "c3074ebc0ac2fe545305f5e5f6cce2cc9b2aa0c5",
+ "continuous_batching": false,
+ "ep_degree": 1,
+ "fused_qkv": true,
+ "glu_mlp": true,
+ "local_ranks_size": 2,
+ "max_batch_size": 1,
+ "max_context_length": 1024,
+ "max_topk": 256,
+ "n_active_tokens": 1024,
+ "neuronxcc_version": "2.21.33363.0+82129205",
+ "on_device_sampling": true,
+ "optimum_neuron_version": "0.4.3.dev3",
+ "output_logits": false,
+ "pp_degree": 1,
+ "sequence_length": 1024,
+ "sequence_parallel_enabled": false,
+ "speculation_length": 0,
+ "start_rank_id": 0,
+ "target": "trn1",
+ "torch_dtype": "float32",
+ "tp_degree": 2
+ },
+ "num_attention_heads": 4,
+ "num_hidden_layers": 2,
+ "num_key_value_heads": 4,
+ "residual_multiplier": 1.0,
+ "rms_norm_eps": 1e-06,
+ "rope_scaling": null,
+ "rope_theta": 10000.0,
+ "tie_word_embeddings": false,
+ "use_cache": true,
+ "vocab_size": 49152
+ }
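
Each registry entry is a plain JSON document: the outer keys mirror the Hugging Face model config, and the nested "neuron" object is the serialized NxDNeuronConfig recording the export settings (batch size, sequence length, tensor-parallel degree, target chip, compiler version). A short sketch of reading one entry after downloading it locally; the path is the one shown above:

import json
from pathlib import Path

entry_path = Path("neuronxcc-2.21.33363.0+82129205/0_REGISTRY/0.4.3.dev3/granite/"
                  "hf-internal-testing/tiny-random-GraniteForCausalLM/822b61c3a021502bf290.json")
entry = json.loads(entry_path.read_text())
neuron_cfg = entry["neuron"]  # serialized NxDNeuronConfig

# These fields are what decide whether a cached compilation can be reused.
print(entry["_model_id"], neuron_cfg["checkpoint_revision"])
print(neuron_cfg["batch_size"], neuron_cfg["sequence_length"],
      neuron_cfg["tp_degree"], neuron_cfg["target"], neuron_cfg["neuronxcc_version"])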
neuronxcc-2.21.33363.0+82129205/0_REGISTRY/0.4.3.dev3/llama/llamafactory/tiny-random-Llama-3/9587f75fe03d32199424.json ADDED
@@ -0,0 +1,63 @@
+ {
+ "_entry_class": "SingleModelCacheEntry",
+ "_model_id": "llamafactory/tiny-random-Llama-3",
+ "_task": "text-generation",
+ "architectures": [
+ "LlamaForCausalLM"
+ ],
+ "attention_bias": false,
+ "attention_dropout": 0.0,
+ "dtype": "float16",
+ "head_dim": 4,
+ "hidden_act": "silu",
+ "hidden_size": 16,
+ "initializer_range": 0.02,
+ "intermediate_size": 64,
+ "max_position_embeddings": 131072,
+ "mlp_bias": false,
+ "model_type": "llama",
+ "neuron": {
+ "_serialized_key": "NxDNeuronConfig",
+ "batch_size": 1,
+ "capacity_factor": null,
+ "checkpoint_id": "llamafactory/tiny-random-Llama-3",
+ "checkpoint_revision": "bf2a2e3bf199ad2ee96f02a3c00246c608db22a8",
+ "continuous_batching": false,
+ "ep_degree": 1,
+ "fused_qkv": true,
+ "glu_mlp": true,
+ "local_ranks_size": 2,
+ "max_batch_size": 1,
+ "max_context_length": 1024,
+ "max_topk": 256,
+ "n_active_tokens": 1024,
+ "neuronxcc_version": "2.21.33363.0+82129205",
+ "on_device_sampling": true,
+ "optimum_neuron_version": "0.4.3.dev3",
+ "output_logits": false,
+ "pp_degree": 1,
+ "sequence_length": 1024,
+ "sequence_parallel_enabled": false,
+ "speculation_length": 0,
+ "start_rank_id": 0,
+ "target": "trn1",
+ "torch_dtype": "float16",
+ "tp_degree": 2
+ },
+ "num_attention_heads": 4,
+ "num_hidden_layers": 2,
+ "num_key_value_heads": 4,
+ "pretraining_tp": 1,
+ "rms_norm_eps": 1e-05,
+ "rope_scaling": {
+ "factor": 8.0,
+ "high_freq_factor": 4.0,
+ "low_freq_factor": 1.0,
+ "original_max_position_embeddings": 8192,
+ "rope_type": "llama3"
+ },
+ "rope_theta": 500000.0,
+ "tie_word_embeddings": false,
+ "use_cache": true,
+ "vocab_size": 128256
+ }
neuronxcc-2.21.33363.0+82129205/0_REGISTRY/0.4.3.dev3/llama4_text/tiny-random/llama-4/663adc121ad6f0bf6c41.json ADDED
@@ -0,0 +1,82 @@
+ {
+ "_entry_class": "SingleModelCacheEntry",
+ "_model_id": "tiny-random/llama-4",
+ "_task": "text-generation",
+ "attention_bias": false,
+ "attention_chunk_size": 128,
+ "attention_dropout": 0.0,
+ "attn_scale": 0.1,
+ "attn_temperature_tuning": 4,
+ "cache_implementation": "hybrid",
+ "dtype": "bfloat16",
+ "floor_scale": 8192,
+ "for_llm_compressor": false,
+ "head_dim": 32,
+ "hidden_act": "silu",
+ "hidden_size": 32,
+ "initializer_range": 0.02,
+ "interleave_moe_layer_step": 2,
+ "intermediate_size": 64,
+ "intermediate_size_mlp": 128,
+ "layer_types": [
+ "chunked_attention",
+ "chunked_attention",
+ "chunked_attention",
+ "full_attention"
+ ],
+ "max_position_embeddings": 1048576,
+ "model_type": "llama4_text",
+ "moe_layers": [
+ 1,
+ 3
+ ],
+ "neuron": {
+ "_serialized_key": "NxDNeuronConfig",
+ "batch_size": 1,
+ "capacity_factor": null,
+ "checkpoint_id": "tiny-random/llama-4",
+ "checkpoint_revision": "9e716f5d4d1ffe0a44a15f46f4a12b840439aba4",
+ "continuous_batching": false,
+ "ep_degree": 1,
+ "fused_qkv": false,
+ "glu_mlp": true,
+ "local_ranks_size": 2,
+ "max_batch_size": 1,
+ "max_context_length": 1024,
+ "max_topk": 256,
+ "n_active_tokens": 1024,
+ "neuronxcc_version": "2.21.33363.0+82129205",
+ "on_device_sampling": true,
+ "optimum_neuron_version": "0.4.3.dev3",
+ "output_logits": false,
+ "pp_degree": 1,
+ "sequence_length": 1024,
+ "sequence_parallel_enabled": false,
+ "speculation_length": 0,
+ "start_rank_id": 0,
+ "target": "trn1",
+ "torch_dtype": "bfloat16",
+ "tp_degree": 2
+ },
+ "no_rope_layers": [
+ 1,
+ 1,
+ 1,
+ 0
+ ],
+ "num_attention_heads": 1,
+ "num_experts_per_tok": 1,
+ "num_hidden_layers": 4,
+ "num_key_value_heads": 1,
+ "num_local_experts": 8,
+ "output_router_logits": false,
+ "rms_norm_eps": 1e-05,
+ "rope_scaling": null,
+ "rope_theta": 500000.0,
+ "router_aux_loss_coef": 0.001,
+ "router_jitter_noise": 0.0,
+ "tie_word_embeddings": true,
+ "use_cache": true,
+ "use_qk_norm": true,
+ "vocab_size": 202048
+ }
neuronxcc-2.21.33363.0+82129205/0_REGISTRY/0.4.3.dev3/mixtral/dacorvo/Mixtral-tiny/2115b9307dea032bae46.json ADDED
@@ -0,0 +1,58 @@
+ {
+ "_entry_class": "SingleModelCacheEntry",
+ "_model_id": "dacorvo/Mixtral-tiny",
+ "_task": "text-generation",
+ "architectures": [
+ "MixtralForCausalLM"
+ ],
+ "attention_dropout": 0.0,
+ "dtype": "float16",
+ "head_dim": 32,
+ "hidden_act": "silu",
+ "hidden_size": 1024,
+ "initializer_range": 0.02,
+ "intermediate_size": 3584,
+ "max_position_embeddings": 1024,
+ "model_type": "mixtral",
+ "neuron": {
+ "_serialized_key": "NxDNeuronConfig",
+ "batch_size": 1,
+ "capacity_factor": null,
+ "checkpoint_id": "dacorvo/Mixtral-tiny",
+ "checkpoint_revision": "c557ba205ddff6ea911f4719e0d543d6c08356b6",
+ "continuous_batching": false,
+ "ep_degree": 1,
+ "fused_qkv": false,
+ "glu_mlp": true,
+ "local_ranks_size": 2,
+ "max_batch_size": 1,
+ "max_context_length": 1024,
+ "max_topk": 256,
+ "n_active_tokens": 1024,
+ "neuronxcc_version": "2.21.33363.0+82129205",
+ "on_device_sampling": false,
+ "optimum_neuron_version": "0.4.3.dev3",
+ "output_logits": false,
+ "pp_degree": 1,
+ "sequence_length": 1024,
+ "speculation_length": 0,
+ "start_rank_id": 0,
+ "target": "trn1",
+ "torch_dtype": "float16",
+ "tp_degree": 2
+ },
+ "num_attention_heads": 32,
+ "num_experts_per_tok": 2,
+ "num_hidden_layers": 2,
+ "num_key_value_heads": 8,
+ "num_local_experts": 8,
+ "output_router_logits": false,
+ "rms_norm_eps": 1e-05,
+ "rope_theta": 10000.0,
+ "router_aux_loss_coef": 0.001,
+ "router_jitter_noise": 0.0,
+ "sliding_window": 4096,
+ "tie_word_embeddings": false,
+ "use_cache": true,
+ "vocab_size": 32000
+ }
neuronxcc-2.21.33363.0+82129205/0_REGISTRY/0.4.3.dev3/mixtral/dacorvo/Mixtral-tiny/6c78d4e7067fedb8c6d8.json ADDED
@@ -0,0 +1,59 @@
+ {
+ "_entry_class": "SingleModelCacheEntry",
+ "_model_id": "dacorvo/Mixtral-tiny",
+ "_task": "text-generation",
+ "architectures": [
+ "MixtralForCausalLM"
+ ],
+ "attention_dropout": 0.0,
+ "dtype": "float16",
+ "head_dim": 32,
+ "hidden_act": "silu",
+ "hidden_size": 1024,
+ "initializer_range": 0.02,
+ "intermediate_size": 3584,
+ "max_position_embeddings": 1024,
+ "model_type": "mixtral",
+ "neuron": {
+ "_serialized_key": "NxDNeuronConfig",
+ "batch_size": 1,
+ "capacity_factor": null,
+ "checkpoint_id": "dacorvo/Mixtral-tiny",
+ "checkpoint_revision": "c557ba205ddff6ea911f4719e0d543d6c08356b6",
+ "continuous_batching": false,
+ "ep_degree": 1,
+ "fused_qkv": false,
+ "glu_mlp": true,
+ "local_ranks_size": 2,
+ "max_batch_size": 1,
+ "max_context_length": 1024,
+ "max_topk": 256,
+ "n_active_tokens": 1024,
+ "neuronxcc_version": "2.21.33363.0+82129205",
+ "on_device_sampling": false,
+ "optimum_neuron_version": "0.4.3.dev3",
+ "output_logits": false,
+ "pp_degree": 1,
+ "sequence_length": 1024,
+ "sequence_parallel_enabled": false,
+ "speculation_length": 0,
+ "start_rank_id": 0,
+ "target": "trn1",
+ "torch_dtype": "float16",
+ "tp_degree": 2
+ },
+ "num_attention_heads": 32,
+ "num_experts_per_tok": 2,
+ "num_hidden_layers": 2,
+ "num_key_value_heads": 8,
+ "num_local_experts": 8,
+ "output_router_logits": false,
+ "rms_norm_eps": 1e-05,
+ "rope_theta": 10000.0,
+ "router_aux_loss_coef": 0.001,
+ "router_jitter_noise": 0.0,
+ "sliding_window": 4096,
+ "tie_word_embeddings": false,
+ "use_cache": true,
+ "vocab_size": 32000
+ }
neuronxcc-2.21.33363.0+82129205/0_REGISTRY/0.4.3.dev3/phi3/yujiepan/phi-4-tiny-random/9830c1de6c2e8faed0c8.json ADDED
@@ -0,0 +1,60 @@
+ {
+ "_entry_class": "SingleModelCacheEntry",
+ "_model_id": "yujiepan/phi-4-tiny-random",
+ "_task": "text-generation",
+ "architectures": [
+ "Phi3ForCausalLM"
+ ],
+ "attention_bias": false,
+ "attention_dropout": 0.0,
+ "auto_map": {},
+ "dtype": "bfloat16",
+ "embd_pdrop": 0.0,
+ "hidden_act": "silu",
+ "hidden_size": 16,
+ "initializer_range": 0.02,
+ "intermediate_size": 32,
+ "max_position_embeddings": 16384,
+ "model_type": "phi3",
+ "neuron": {
+ "_serialized_key": "NxDNeuronConfig",
+ "batch_size": 1,
+ "capacity_factor": null,
+ "checkpoint_id": "yujiepan/phi-4-tiny-random",
+ "checkpoint_revision": "18a9a1168dc97ac6d128f811925670c275610f5a",
+ "continuous_batching": false,
+ "ep_degree": 1,
+ "fused_qkv": true,
+ "glu_mlp": true,
+ "local_ranks_size": 2,
+ "max_batch_size": 1,
+ "max_context_length": 1024,
+ "max_topk": 256,
+ "n_active_tokens": 1024,
+ "neuronxcc_version": "2.21.33363.0+82129205",
+ "on_device_sampling": true,
+ "optimum_neuron_version": "0.4.3.dev3",
+ "output_logits": false,
+ "pp_degree": 1,
+ "sequence_length": 1024,
+ "sequence_parallel_enabled": false,
+ "speculation_length": 0,
+ "start_rank_id": 0,
+ "target": "trn1",
+ "torch_dtype": "bfloat16",
+ "tp_degree": 2
+ },
+ "num_attention_heads": 2,
+ "num_hidden_layers": 2,
+ "num_key_value_heads": 1,
+ "original_max_position_embeddings": 16384,
+ "partial_rotary_factor": 1.0,
+ "resid_pdrop": 0.0,
+ "rms_norm_eps": 1e-05,
+ "rope_scaling": null,
+ "rope_theta": 250000,
+ "sliding_window": null,
+ "tie_word_embeddings": false,
+ "use_cache": true,
+ "vocab_size": 100352
+ }
neuronxcc-2.21.33363.0+82129205/0_REGISTRY/0.4.3.dev3/qwen2/yujiepan/qwen2.5-128k-tiny-random/d2ffa119e8a8f246f65e.json ADDED
@@ -0,0 +1,65 @@
+ {
+ "_entry_class": "SingleModelCacheEntry",
+ "_model_id": "yujiepan/qwen2.5-128k-tiny-random",
+ "_task": "text-generation",
+ "architectures": [
+ "Qwen2ForCausalLM"
+ ],
+ "attention_dropout": 0.0,
+ "dtype": "bfloat16",
+ "hidden_act": "silu",
+ "hidden_size": 8,
+ "initializer_range": 0.02,
+ "intermediate_size": 16,
+ "layer_types": [
+ "full_attention",
+ "full_attention"
+ ],
+ "max_position_embeddings": 32768,
+ "max_window_layers": 1,
+ "model_type": "qwen2",
+ "neuron": {
+ "_serialized_key": "NxDNeuronConfig",
+ "batch_size": 1,
+ "capacity_factor": null,
+ "checkpoint_id": "yujiepan/qwen2.5-128k-tiny-random",
+ "checkpoint_revision": "c8296d4ca3f87782876d2382fbb6481d1beb8ef0",
+ "continuous_batching": false,
+ "ep_degree": 1,
+ "fused_qkv": false,
+ "glu_mlp": true,
+ "local_ranks_size": 2,
+ "max_batch_size": 1,
+ "max_context_length": 1024,
+ "max_topk": 256,
+ "n_active_tokens": 1024,
+ "neuronxcc_version": "2.21.33363.0+82129205",
+ "on_device_sampling": true,
+ "optimum_neuron_version": "0.4.3.dev3",
+ "output_logits": false,
+ "pp_degree": 1,
+ "sequence_length": 1024,
+ "sequence_parallel_enabled": false,
+ "speculation_length": 0,
+ "start_rank_id": 0,
+ "target": "trn1",
+ "torch_dtype": "bfloat16",
+ "tp_degree": 2
+ },
+ "num_attention_heads": 4,
+ "num_hidden_layers": 2,
+ "num_key_value_heads": 2,
+ "rms_norm_eps": 1e-06,
+ "rope_scaling": {
+ "factor": 4.0,
+ "original_max_position_embeddings": 32768,
+ "rope_type": "yarn",
+ "type": "yarn"
+ },
+ "rope_theta": 1000000.0,
+ "sliding_window": null,
+ "tie_word_embeddings": false,
+ "use_cache": true,
+ "use_sliding_window": false,
+ "vocab_size": 152064
+ }
neuronxcc-2.21.33363.0+82129205/0_REGISTRY/0.4.3.dev3/qwen3_moe/optimum-internal-testing/tiny-random-qwen3_moe/35684b74302809c36482.json ADDED
@@ -0,0 +1,66 @@
+ {
+ "_entry_class": "SingleModelCacheEntry",
+ "_model_id": "optimum-internal-testing/tiny-random-qwen3_moe",
+ "_task": "text-generation",
+ "architectures": [
+ "Qwen3MoeForCausalLM"
+ ],
+ "attention_bias": false,
+ "attention_dropout": 0.0,
+ "decoder_sparse_step": 2,
+ "dtype": "float32",
+ "head_dim": 32,
+ "hidden_act": "silu",
+ "hidden_size": 64,
+ "initializer_range": 0.02,
+ "intermediate_size": 128,
+ "max_position_embeddings": 40960,
+ "max_window_layers": 1,
+ "mlp_only_layers": [],
+ "model_type": "qwen3_moe",
+ "moe_intermediate_size": 128,
+ "neuron": {
+ "_serialized_key": "NxDNeuronConfig",
+ "batch_size": 1,
+ "capacity_factor": null,
+ "checkpoint_id": "optimum-internal-testing/tiny-random-qwen3_moe",
+ "checkpoint_revision": "e0230be2839556b44b7400a233c73c74b4abb7af",
+ "continuous_batching": false,
+ "ep_degree": 1,
+ "fused_qkv": false,
+ "glu_mlp": true,
+ "local_ranks_size": 2,
+ "max_batch_size": 1,
+ "max_context_length": 1024,
+ "max_topk": 256,
+ "n_active_tokens": 1024,
+ "neuronxcc_version": "2.21.33363.0+82129205",
+ "on_device_sampling": true,
+ "optimum_neuron_version": "0.4.3.dev3",
+ "output_logits": false,
+ "pp_degree": 1,
+ "sequence_length": 1024,
+ "sequence_parallel_enabled": false,
+ "speculation_length": 0,
+ "start_rank_id": 0,
+ "target": "trn1",
+ "torch_dtype": "float32",
+ "tp_degree": 2
+ },
+ "norm_topk_prob": true,
+ "num_attention_heads": 2,
+ "num_experts": 8,
+ "num_experts_per_tok": 2,
+ "num_hidden_layers": 2,
+ "num_key_value_heads": 1,
+ "output_router_logits": false,
+ "rms_norm_eps": 1e-06,
+ "rope_scaling": null,
+ "rope_theta": 1000000.0,
+ "router_aux_loss_coef": 0.001,
+ "sliding_window": null,
+ "tie_word_embeddings": true,
+ "use_cache": true,
+ "use_sliding_window": false,
+ "vocab_size": 151936
+ }
neuronxcc-2.21.33363.0+82129205/0_REGISTRY/0.4.3.dev3/smollm3/HuggingFaceTB/SmolLM3-3B/9a47a3a9f135594c847d.json ADDED
@@ -0,0 +1,135 @@
+ {
+ "_entry_class": "SingleModelCacheEntry",
+ "_model_id": "HuggingFaceTB/SmolLM3-3B",
+ "_task": "text-generation",
+ "architectures": [
+ "SmolLM3ForCausalLM"
+ ],
+ "attention_bias": false,
+ "attention_dropout": 0.0,
+ "dtype": "bfloat16",
+ "hidden_act": "silu",
+ "hidden_size": 2048,
+ "initializer_range": 0.02,
+ "intermediate_size": 11008,
+ "layer_types": [
+ "full_attention",
+ "full_attention",
+ "full_attention",
+ "full_attention",
+ "full_attention",
+ "full_attention",
+ "full_attention",
+ "full_attention",
+ "full_attention",
+ "full_attention",
+ "full_attention",
+ "full_attention",
+ "full_attention",
+ "full_attention",
+ "full_attention",
+ "full_attention",
+ "full_attention",
+ "full_attention",
+ "full_attention",
+ "full_attention",
+ "full_attention",
+ "full_attention",
+ "full_attention",
+ "full_attention",
+ "full_attention",
+ "full_attention",
+ "full_attention",
+ "full_attention",
+ "full_attention",
+ "full_attention",
+ "full_attention",
+ "full_attention",
+ "full_attention",
+ "full_attention",
+ "full_attention",
+ "full_attention"
+ ],
+ "max_position_embeddings": 65536,
+ "max_window_layers": 28,
+ "mlp_bias": false,
+ "model_type": "smollm3",
+ "neuron": {
+ "_serialized_key": "NxDNeuronConfig",
+ "batch_size": 1,
+ "capacity_factor": null,
+ "checkpoint_id": "HuggingFaceTB/SmolLM3-3B",
+ "checkpoint_revision": "a07cc9a04f16550a088caea529712d1d335b0ac1",
+ "continuous_batching": false,
+ "ep_degree": 1,
+ "fused_qkv": true,
+ "glu_mlp": true,
+ "local_ranks_size": 2,
+ "max_batch_size": 1,
+ "max_context_length": 1024,
+ "max_topk": 256,
+ "n_active_tokens": 1024,
+ "neuronxcc_version": "2.21.33363.0+82129205",
+ "on_device_sampling": true,
+ "optimum_neuron_version": "0.4.3.dev3",
+ "output_logits": false,
+ "pp_degree": 1,
+ "sequence_length": 1024,
+ "sequence_parallel_enabled": false,
+ "speculation_length": 0,
+ "start_rank_id": 0,
+ "target": "trn1",
+ "torch_dtype": "bfloat16",
+ "tp_degree": 2
+ },
+ "no_rope_layer_interval": 4,
+ "no_rope_layers": [
+ 1,
+ 1,
+ 1,
+ 0,
+ 1,
+ 1,
+ 1,
+ 0,
+ 1,
+ 1,
+ 1,
+ 0,
+ 1,
+ 1,
+ 1,
+ 0,
+ 1,
+ 1,
+ 1,
+ 0,
+ 1,
+ 1,
+ 1,
+ 0,
+ 1,
+ 1,
+ 1,
+ 0,
+ 1,
+ 1,
+ 1,
+ 0,
+ 1,
+ 1,
+ 1,
+ 0
+ ],
+ "num_attention_heads": 16,
+ "num_hidden_layers": 36,
+ "num_key_value_heads": 4,
+ "pretraining_tp": 2,
+ "rms_norm_eps": 1e-06,
+ "rope_scaling": null,
+ "rope_theta": 5000000.0,
+ "sliding_window": null,
+ "use_cache": false,
+ "use_sliding_window": false,
+ "vocab_size": 128256
+ }
neuronxcc-2.21.33363.0+82129205/MODULE_01328268a19023ed0f24+f8e6d902/compile_flags.json ADDED
@@ -0,0 +1 @@
+ ["--target=trn1", "--auto-cast=none", "--model-type=transformer", "--tensorizer-options=--enable-ccop-compute-overlap --cc-pipeline-tiling-factor=2", "-O1", "--lnc=1", "--enable-saturate-infinity", "--enable-mixed-precision-accumulation", "--internal-enable-dge-levels=vector_dynamic_offsets", "--internal-hlo2tensorizer-options=--verify-hlo=true", "--logfile=/tmp/nxd_model/context_encoding/_tp0_bk0/log-neuron-cc.txt"]
neuronxcc-2.21.33363.0+82129205/MODULE_01328268a19023ed0f24+f8e6d902/model.done ADDED
File without changes
neuronxcc-2.21.33363.0+82129205/MODULE_01328268a19023ed0f24+f8e6d902/model.hlo_module.pb ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:5a7be97d2012aa41b5f56bf5bf86357a0dae77fd27821fa3e0737aba2fe5521e
+ size 97794
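
model.hlo_module.pb, model.neff and wrapped_neff.hlo are tracked with Git LFS, so the diff only shows the three-line pointer (version, oid, size) rather than the binary payload. A sketch that parses such a pointer file into its fields, assuming the file is checked out as a pointer rather than smudged:

from pathlib import Path

def read_lfs_pointer(path: Path) -> dict:
    fields = {}
    for line in path.read_text().splitlines():
        key, _, value = line.partition(" ")  # lines look like "oid sha256:<hex>"
        fields[key] = value
    return fields

pointer = read_lfs_pointer(Path("model.hlo_module.pb"))
print(pointer["oid"], int(pointer["size"]))  # e.g. sha256:5a7be97d... 97794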
neuronxcc-2.21.33363.0+82129205/MODULE_01328268a19023ed0f24+f8e6d902/model.neff ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:7ff6c1f2a8f76328f3ffd5cd9cbb7ad62552ac54af96be0b68b754f3e79f2c6e
+ size 410624
neuronxcc-2.21.33363.0+82129205/MODULE_137f98143cdeb07dab7d+a02c3a36/compile_flags.json ADDED
@@ -0,0 +1 @@
+ ["--target=trn1", "--auto-cast=none", "--model-type=transformer", "--tensorizer-options=--enable-ccop-compute-overlap --cc-pipeline-tiling-factor=2 --vectorize-strided-dma ", "-O2", "--lnc=1", "--logfile=/tmp/nxd_model/token_generation/_tp0_bk0/log-neuron-cc.txt", "--enable-internal-neff-wrapper"]
neuronxcc-2.21.33363.0+82129205/MODULE_137f98143cdeb07dab7d+a02c3a36/model.done ADDED
File without changes
neuronxcc-2.21.33363.0+82129205/MODULE_137f98143cdeb07dab7d+a02c3a36/model.hlo_module.pb ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:6c3af8ccf676bfc9035f2832be64f2c64d4fb71e544ed4a55e45823227860db7
+ size 863008
neuronxcc-2.21.33363.0+82129205/MODULE_137f98143cdeb07dab7d+a02c3a36/model.neff ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:db55af8b240e22fd8c6947d7fcf77d9db9951d7a854022ea5e1bcdc69b2e9e8c
+ size 5039104
neuronxcc-2.21.33363.0+82129205/MODULE_137f98143cdeb07dab7d+a02c3a36/wrapped_neff.hlo ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:4d12b0bd6935e99c2317788a98c688f30bccd28bc264c3e8886100cb709dfc68
+ size 5205771
neuronxcc-2.21.33363.0+82129205/MODULE_1d4bedf0c37a53b47737+a02c3a36/compile_flags.json ADDED
@@ -0,0 +1 @@
+ ["--target=trn1", "--auto-cast=none", "--model-type=transformer", "--tensorizer-options=--enable-ccop-compute-overlap --cc-pipeline-tiling-factor=2 --vectorize-strided-dma ", "-O2", "--lnc=1", "--logfile=/tmp/nxd_model/token_generation/_tp0_bk0/log-neuron-cc.txt", "--enable-internal-neff-wrapper"]
neuronxcc-2.21.33363.0+82129205/MODULE_1d4bedf0c37a53b47737+a02c3a36/model.done ADDED
File without changes
neuronxcc-2.21.33363.0+82129205/MODULE_1d4bedf0c37a53b47737+a02c3a36/model.hlo_module.pb ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:5d61ac2165fde7772e8d3035b0f89b50beec5e0d305548cbacc624eefa9452d3
+ size 82753
neuronxcc-2.21.33363.0+82129205/MODULE_1d4bedf0c37a53b47737+a02c3a36/model.neff ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:46ba68c0aadd4763eb642e9c19a4cc1d146805bf9f246e0e641bb2e390e292dc
+ size 277504
neuronxcc-2.21.33363.0+82129205/MODULE_1d4bedf0c37a53b47737+a02c3a36/wrapped_neff.hlo ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:5bc467900543a33abd00bdb0231e3abf9d59ce0ee85e2dc7dda90fe7684a866d
+ size 285854
neuronxcc-2.21.33363.0+82129205/MODULE_2672b897bf6327bea9e4+24129607/compile_flags.json ADDED
@@ -0,0 +1 @@
+ ["--target=trn1", "--auto-cast=none", "--model-type=transformer", "--tensorizer-options=--enable-ccop-compute-overlap --cc-pipeline-tiling-factor=2 --vectorize-strided-dma ", "-O2", "--lnc=1", "--logfile=/tmp/nxd_model/context_encoding/_tp0_bk0/log-neuron-cc.txt"]
neuronxcc-2.21.33363.0+82129205/MODULE_2672b897bf6327bea9e4+24129607/model.done ADDED
File without changes
neuronxcc-2.21.33363.0+82129205/MODULE_2672b897bf6327bea9e4+24129607/model.hlo_module.pb ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:b4b6486a12378501e2484cf81b9f6885eabca76a7d817e3e2041c1736bac16d7
+ size 82772
neuronxcc-2.21.33363.0+82129205/MODULE_2672b897bf6327bea9e4+24129607/model.neff ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:1c65b6b3a5f7c34ae231694981cf76347fb89393c25990695cd4f921f3d243e3
+ size 267264
neuronxcc-2.21.33363.0+82129205/MODULE_31e31b6f47afb89b553b+bafdbdde/compile_flags.json ADDED
@@ -0,0 +1 @@
+ ["--target=trn1", "--auto-cast=none", "--model-type=transformer", "--tensorizer-options=--enable-ccop-compute-overlap --cc-pipeline-tiling-factor=2", "-O1", "--lnc=1", "--enable-saturate-infinity", "--enable-mixed-precision-accumulation", "--internal-enable-dge-levels=vector_dynamic_offsets", "--internal-hlo2tensorizer-options=--verify-hlo=true", "--logfile=/tmp/nxd_model/token_generation/_tp0_bk0/log-neuron-cc.txt", "--enable-internal-neff-wrapper"]
neuronxcc-2.21.33363.0+82129205/MODULE_31e31b6f47afb89b553b+bafdbdde/model.done ADDED
File without changes
neuronxcc-2.21.33363.0+82129205/MODULE_31e31b6f47afb89b553b+bafdbdde/model.hlo_module.pb ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:061213de218a73c7d090fd093562e9c08718438de6287e6a332372c8f7906930
+ size 70276
neuronxcc-2.21.33363.0+82129205/MODULE_31e31b6f47afb89b553b+bafdbdde/model.neff ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:e64780612d32c890d4c7d297248e064089d8ef4f8a9f9d5a9a1c169c6421ec29
+ size 277504
neuronxcc-2.21.33363.0+82129205/MODULE_31e31b6f47afb89b553b+bafdbdde/wrapped_neff.hlo ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:54d1ec39550a8bb2e6e192265b3fed7b4a12065b0c8d82456a918f7a5be1e4ae
+ size 289571
neuronxcc-2.21.33363.0+82129205/MODULE_31e31b6f47afb89b553b+da15b874/compile_flags.json ADDED
@@ -0,0 +1 @@
+ ["--target=trn1", "--auto-cast=none", "--model-type=transformer", "--tensorizer-options=--enable-ccop-compute-overlap --cc-pipeline-tiling-factor=2", "-O1--lnc=1", "--enable-saturate-infinity", "--enable-mixed-precision-accumulation", "--internal-enable-dge-levels=vector_dynamic_offsets", "--internal-hlo2tensorizer-options=--verify-hlo=true", "--logfile=/tmp/nxd_model/token_generation/_tp0_bk0/log-neuron-cc.txt", "--enable-internal-neff-wrapper"]
neuronxcc-2.21.33363.0+82129205/MODULE_31e31b6f47afb89b553b+da15b874/model.hlo_module.pb ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:ec57d9d2f9d9be197ad88a58ece6b14b0ccab97c121b8d20cc9512558ddf6562
+ size 70276
neuronxcc-2.21.33363.0+82129205/MODULE_31e31b6f47afb89b553b+da15b874/model.log ADDED
@@ -0,0 +1,5 @@
+ Failed compilation with ['neuronx-cc', 'compile', '--framework=XLA', '/tmp/nxd_model/token_generation/_tp0_bk0/model.MODULE_31e31b6f47afb89b553b+da15b874.hlo_module.pb', '--output', '/tmp/nxd_model/token_generation/_tp0_bk0/model.MODULE_31e31b6f47afb89b553b+da15b874.neff', '--target=trn1', '--auto-cast=none', '--model-type=transformer', '--tensorizer-options=--enable-ccop-compute-overlap --cc-pipeline-tiling-factor=2', '-O1--lnc=1', '--enable-saturate-infinity', '--enable-mixed-precision-accumulation', '--internal-enable-dge-levels=vector_dynamic_offsets', '--internal-hlo2tensorizer-options=--verify-hlo=true', '--logfile=/tmp/nxd_model/token_generation/_tp0_bk0/log-neuron-cc.txt', '--enable-internal-neff-wrapper', '--verbose=35']: neuronx-cc compile: argument --optlevel/-O: invalid choice: 1--lnc=1 (choose from 1, 2, 3)
+ usage: neuronx-cc compile --framework {XLA} --target {trn1,inf2,trn1n,trn2,trn2n} [--logical-nc-config {1,2}] [--enable-fast-loading-neuron-binaries] [--enable-fast-context-switch]
+ [--auto-cast <cast mode>] [--auto-cast-type {fp16,bf16,tf32,fp8_e4m3}] [--output <filename>] [--optlevel {1,2,3}] [--help]
+ [--model-type {transformer,unet-inference,generic}] [--distribution-strategy {fsdp,nemo,llm-training,generic}] [--enable-dge]
+ [--verbose {debug|info|warning|user|off}] [--logfile <filename>] [--logfile-verbose {debug|info|warning|user}]
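
The log records why this module has no model.neff: the optimization level and logical-NC flags were concatenated into the single argument "-O1--lnc=1" (see this module's compile_flags.json above), which neuronx-cc rejects as an invalid --optlevel choice. The sibling entry MODULE_31e31b6f47afb89b553b+bafdbdde passes "-O1" and "--lnc=1" as separate items and compiles the same HLO successfully. A small, purely illustrative check (not part of the Neuron SDK) that would flag this kind of glued-together argument before invoking the compiler:

import re

def find_glued_flags(flags: list[str]) -> list[str]:
    # A short option such as "-O1" should not have a second "--option" fused onto it.
    return [flag for flag in flags if re.match(r"^-[A-Za-z]\w*--", flag)]

print(find_glued_flags(["-O1--lnc=1"]))      # ['-O1--lnc=1']
print(find_glued_flags(["-O1", "--lnc=1"]))  # []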
neuronxcc-2.21.33363.0+82129205/MODULE_4247663625e2eaeb4fab+a02c3a36/compile_flags.json ADDED
@@ -0,0 +1 @@
+ ["--target=trn1", "--auto-cast=none", "--model-type=transformer", "--tensorizer-options=--enable-ccop-compute-overlap --cc-pipeline-tiling-factor=2 --vectorize-strided-dma ", "-O2", "--lnc=1", "--logfile=/tmp/nxd_model/token_generation/_tp0_bk0/log-neuron-cc.txt", "--enable-internal-neff-wrapper"]
neuronxcc-2.21.33363.0+82129205/MODULE_4247663625e2eaeb4fab+a02c3a36/model.done ADDED
File without changes
neuronxcc-2.21.33363.0+82129205/MODULE_4247663625e2eaeb4fab+a02c3a36/model.hlo_module.pb ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:f0c908a5ddbf03970315b73168cfd6d3b14f39328f2b700c5d1e9c4144323034
+ size 91147
neuronxcc-2.21.33363.0+82129205/MODULE_4247663625e2eaeb4fab+a02c3a36/model.neff ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:ad22d482746dc393d9343d9b7855e8da2352e083b60b8627da2f1291a7ba960f
+ size 277504
neuronxcc-2.21.33363.0+82129205/MODULE_4247663625e2eaeb4fab+a02c3a36/wrapped_neff.hlo ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:0d64528a98e0b9f958208d4b0fa5e55a5cd5026e6c04a9d2d5ee1e01bfcec393
+ size 289031
neuronxcc-2.21.33363.0+82129205/MODULE_5960bdedd7549bfacf7a+24129607/compile_flags.json ADDED
@@ -0,0 +1 @@
+ ["--target=trn1", "--auto-cast=none", "--model-type=transformer", "--tensorizer-options=--enable-ccop-compute-overlap --cc-pipeline-tiling-factor=2 --vectorize-strided-dma ", "-O2", "--lnc=1", "--logfile=/tmp/nxd_model/context_encoding/_tp0_bk0/log-neuron-cc.txt"]
neuronxcc-2.21.33363.0+82129205/MODULE_5960bdedd7549bfacf7a+24129607/model.done ADDED
File without changes
neuronxcc-2.21.33363.0+82129205/MODULE_5960bdedd7549bfacf7a+24129607/model.hlo_module.pb ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:76c10df9f9fadac3ce5fee74c4470b2cdade440cf97718b4545a4e3de7fc54aa
+ size 694128
neuronxcc-2.21.33363.0+82129205/MODULE_5960bdedd7549bfacf7a+24129607/model.neff ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:5f816d25661f62f7f838c0800cb426bb2a2a552c974bc9c93181f83780ef36d8
+ size 625664
neuronxcc-2.21.33363.0+82129205/MODULE_73021bd2bcd85d0ae281+bafdbdde/compile_flags.json ADDED
@@ -0,0 +1 @@
+ ["--target=trn1", "--auto-cast=none", "--model-type=transformer", "--tensorizer-options=--enable-ccop-compute-overlap --cc-pipeline-tiling-factor=2", "-O1", "--lnc=1", "--enable-saturate-infinity", "--enable-mixed-precision-accumulation", "--internal-enable-dge-levels=vector_dynamic_offsets", "--internal-hlo2tensorizer-options=--verify-hlo=true", "--logfile=/tmp/nxd_model/token_generation/_tp0_bk0/log-neuron-cc.txt", "--enable-internal-neff-wrapper"]
neuronxcc-2.21.33363.0+82129205/MODULE_73021bd2bcd85d0ae281+bafdbdde/model.done ADDED
File without changes
neuronxcc-2.21.33363.0+82129205/MODULE_73021bd2bcd85d0ae281+bafdbdde/model.hlo_module.pb ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:6596d3769509c9146ae4a3975d1c70c2eb236f1527bf8cab5cf9306c4300289f
+ size 89555
neuronxcc-2.21.33363.0+82129205/MODULE_73021bd2bcd85d0ae281+bafdbdde/model.neff ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:d4e6305d96ffce7a09ffe9fbe739daef69c503eceaa33e7181c52bb69f8e669d
+ size 369664
neuronxcc-2.21.33363.0+82129205/MODULE_73021bd2bcd85d0ae281+bafdbdde/wrapped_neff.hlo ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:0093375c250feaa10fa7e89832496ea173e63ca7c802ed01d0f81aca0be48912
+ size 379362