
Commit 6972751

Remove unused layer mapping

1 parent 5d151ff commit 6972751
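
For context: each module touched below defines a params mapping, a map from Bumblebee layer names (with "{n}" standing for the decoder block index) to the corresponding parameter names in the upstream Hugging Face checkpoint. Rotary embeddings are computed on the fly and carry no learned parameters, so the "rotary_embedding" entry had no checkpoint tensor to load and was unused. A minimal illustrative sketch of the mapping shape (not the full map from any of these files):

    %{
      # Each Bumblebee layer name points at the checkpoint parameter it loads from.
      "decoder.blocks.{n}.self_attention.query" => "model.layers.{n}.self_attn.q_proj",
      "decoder.blocks.{n}.self_attention.key" => "model.layers.{n}.self_attn.k_proj",
      # Entry removed by this commit: rotary embeddings are computed, not learned,
      # so there is no checkpoint tensor for this mapping to load.
      # "decoder.blocks.{n}.self_attention.rotary_embedding" =>
      #   "model.layers.{n}.self_attn.rotary_emb"
    }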

File tree

6 files changed: 0 additions & 12 deletions


lib/bumblebee/text/gemma.ex

Lines changed: 0 additions & 2 deletions
@@ -449,8 +449,6 @@ defmodule Bumblebee.Text.Gemma do
         "decoder.blocks.{n}.self_attention.value" => "model.layers.{n}.self_attn.v_proj",
         "decoder.blocks.{n}.self_attention.output" => "model.layers.{n}.self_attn.o_proj",
         "decoder.blocks.{n}.self_attention_norm" => "model.layers.{n}.input_layernorm",
-        "decoder.blocks.{n}.self_attention.rotary_embedding" =>
-          "model.layers.{n}.self_attn.rotary_emb",
         "decoder.blocks.{n}.ffn.gate" => "model.layers.{n}.mlp.gate_proj",
         "decoder.blocks.{n}.ffn.intermediate" => "model.layers.{n}.mlp.up_proj",
         "decoder.blocks.{n}.ffn.output" => "model.layers.{n}.mlp.down_proj",

lib/bumblebee/text/gpt_neo_x.ex

Lines changed: 0 additions & 2 deletions
@@ -426,8 +426,6 @@ defmodule Bumblebee.Text.GptNeoX do
           ),
         "decoder.blocks.{n}.self_attention.output" => "gpt_neox.layers.{n}.attention.dense",
         "decoder.blocks.{n}.self_attention_norm" => "gpt_neox.layers.{n}.input_layernorm",
-        "decoder.blocks.{n}.self_attention.rotary_embedding" =>
-          "gpt_neox.layers.{n}.self_attn.rotary_emb",
         "decoder.blocks.{n}.ffn.intermediate" => "gpt_neox.layers.{n}.mlp.dense_h_to_4h",
         "decoder.blocks.{n}.ffn.output" => "gpt_neox.layers.{n}.mlp.dense_4h_to_h",
         "decoder.blocks.{n}.output_norm" => "gpt_neox.layers.{n}.post_attention_layernorm",

lib/bumblebee/text/llama.ex

Lines changed: 0 additions & 2 deletions
@@ -469,8 +469,6 @@ defmodule Bumblebee.Text.Llama do
         "decoder.blocks.{n}.self_attention.value" => "model.layers.{n}.self_attn.v_proj",
         "decoder.blocks.{n}.self_attention.output" => "model.layers.{n}.self_attn.o_proj",
         "decoder.blocks.{n}.self_attention_norm" => "model.layers.{n}.input_layernorm",
-        "decoder.blocks.{n}.self_attention.rotary_embedding" =>
-          "model.layers.{n}.self_attn.rotary_emb",
         "decoder.blocks.{n}.ffn.gate" => "model.layers.{n}.mlp.gate_proj",
         "decoder.blocks.{n}.ffn.intermediate" => "model.layers.{n}.mlp.up_proj",
         "decoder.blocks.{n}.ffn.output" => "model.layers.{n}.mlp.down_proj",

lib/bumblebee/text/mistral.ex

Lines changed: 0 additions & 2 deletions
@@ -411,8 +411,6 @@ defmodule Bumblebee.Text.Mistral do
         "decoder.blocks.{n}.self_attention.value" => "model.layers.{n}.self_attn.v_proj",
         "decoder.blocks.{n}.self_attention.output" => "model.layers.{n}.self_attn.o_proj",
         "decoder.blocks.{n}.self_attention_norm" => "model.layers.{n}.input_layernorm",
-        "decoder.blocks.{n}.self_attention.rotary_embedding" =>
-          "model.layers.{n}.self_attn.rotary_emb",
         "decoder.blocks.{n}.ffn.gate" => "model.layers.{n}.mlp.gate_proj",
         "decoder.blocks.{n}.ffn.intermediate" => "model.layers.{n}.mlp.up_proj",
         "decoder.blocks.{n}.ffn.output" => "model.layers.{n}.mlp.down_proj",

lib/bumblebee/text/phi.ex

Lines changed: 0 additions & 2 deletions
@@ -442,8 +442,6 @@ defmodule Bumblebee.Text.Phi do
         "decoder.blocks.{n}.self_attention.value" => "model.layers.{n}.self_attn.v_proj",
         "decoder.blocks.{n}.self_attention.output" => "model.layers.{n}.self_attn.dense",
         "decoder.blocks.{n}.self_attention_norm" => "model.layers.{n}.input_layernorm",
-        "decoder.blocks.{n}.self_attention.rotary_embedding" =>
-          "model.layers.{n}.self_attn.rotary_emb",
         "decoder.blocks.{n}.ffn.intermediate" => "model.layers.{n}.mlp.fc1",
         "decoder.blocks.{n}.ffn.output" => "model.layers.{n}.mlp.fc2",
         "output_norm" => "model.final_layernorm",

lib/bumblebee/text/phi3.ex

Lines changed: 0 additions & 2 deletions
@@ -497,8 +497,6 @@ defmodule Bumblebee.Text.Phi3 do
           ),
         "decoder.blocks.{n}.self_attention.output" => "model.layers.{n}.self_attn.o_proj",
         "decoder.blocks.{n}.self_attention_norm" => "model.layers.{n}.input_layernorm",
-        "decoder.blocks.{n}.self_attention.rotary_embedding" =>
-          "model.layers.{n}.self_attn.rotary_emb",
         "decoder.blocks.{n}.ffn.gate" =>
           Shared.sliced_dense_params_source(
             "model.layers.{n}.mlp.gate_up_proj",
