Amitz244 committed
Commit 807ea7b (verified) · Parent: fb43af1

Update modeling.py

Files changed (1):
  modeling.py (+3, -3)
modeling.py CHANGED
@@ -4,7 +4,7 @@ from transformers import CLIPModel
 from peft import LoraConfig, get_peft_model
 
 class MLP(nn.Module):
-    def __init__(self, input_dim, hidden_dim1, hidden_dim2, output_dim,dropout_rate=0.5):
+    def __init__(self, input_dim=768, hidden_dim1=512, hidden_dim2=256, output_dim=8,dropout_rate=0.5):
         super(MLP, self).__init__()
         self.fc1 = nn.Linear(input_dim, hidden_dim1)
         self.relu1 = nn.ReLU()
@@ -24,7 +24,7 @@ class MLP(nn.Module):
         return x
 
 class clip_lora_model(nn.Module):
-    def __init__(self, input_dim, hidden_dim1, hidden_dim2, output_dim,dropout_rate=0.5,r=16,lora_alpha=8):
+    def __init__(self, input_dim=768, hidden_dim1=512, hidden_dim2=256, output_dim=8,dropout_rate=0.5,r=16,lora_alpha=8):
         super(clip_lora_model, self).__init__()
         self.output_dim=output_dim
         self.mlp = MLP(input_dim, hidden_dim1, hidden_dim2, output_dim,dropout_rate)
@@ -45,7 +45,7 @@ class clip_lora_model(nn.Module):
         )
         self.model = get_peft_model(encoder, config)
 
-    def forward(self, x,feat=None,flags=None):
+    def forward(self, x):
         model_outputs = self.model(x)
         image_embeds = model_outputs[1]
         model_outputs = self.proj(image_embeds)
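
For orientation, here is a hedged usage sketch of the interface after this commit. Only the new constructor defaults and the reduced forward(self, x) signature are taken from the diff; the input resolution and the random stand-in batch are illustrative assumptions, and the output shape depends on the projection head defined in the unchanged lines of modeling.py, which this diff does not show.

# Hedged sketch, not the repository's exact setup: only the constructor
# defaults and the forward(x) signature below come from this commit.
import torch
from modeling import clip_lora_model

# After this commit every hyperparameter has a default, so the shipped
# architecture can be rebuilt with no arguments:
# input_dim=768, hidden_dim1=512, hidden_dim2=256, output_dim=8,
# dropout_rate=0.5, r=16, lora_alpha=8.
model = clip_lora_model()
model.eval()

# Illustrative CLIP-style image batch (224x224 RGB is an assumption; the
# real preprocessing pipeline is outside this diff).
pixel_values = torch.randn(2, 3, 224, 224)

with torch.no_grad():
    out = model(pixel_values)  # forward(self, x): the unused feat/flags
                               # parameters were dropped in this commit

Pinning defaults that match the released checkpoint is a common Hub pattern: downstream code can instantiate clip_lora_model() and load the state dict without consulting the training script, and removing the unused feat and flags parameters keeps the forward signature honest about what the model actually consumes.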