Commit

update examples
whw199833 committed Nov 30, 2023
1 parent 03164c6 commit 42f5806
Showing 103 changed files with 2,562 additions and 5 deletions.
35 changes: 35 additions & 0 deletions example/layer/BiLinearInteractionLayer_test.py
@@ -0,0 +1,35 @@
#!/usr/bin/env python
# coding: utf-8

# In[2]:


import torch
import torch.nn as nn
from gbiz_torch.layer import BiLinearInteractionLayer


# In[3]:


# batch of 2 sequences, 3 token ids each (renamed to avoid shadowing the built-in `input`)
token_ids = torch.randint(0, 3, (2, 3))
get_emb = nn.Embedding(3, 4)
input_seq = get_emb(token_ids)  # embedded sequence, shape (2, 3, 4)


# In[4]:


test_input = input_seq
BI_layer = BiLinearInteractionLayer(in_shape=test_input.shape[-1])

test_output = BI_layer(test_input)

print(test_output.shape)
print(f"test_output is {test_output}")


# In[5]:


BI_layer
File renamed without changes.
39 changes: 39 additions & 0 deletions example/layer/BridgeLayer_test.py
@@ -0,0 +1,39 @@
#!/usr/bin/env python
# coding: utf-8

# In[2]:


import torch
import torch.nn as nn
from gbiz_torch.layer import BridgeLayer


# In[12]:


test_input = torch.randn(8, 6)


# In[13]:


BL_layer = BridgeLayer(in_shape=test_input.shape[-1], n_layers=5)

test_output = BL_layer(test_input)

print(f"test_output.shape is {test_output.shape}")
print(f"test_output is {test_output}")


# In[5]:


BL_layer


# In[ ]:




44 changes: 44 additions & 0 deletions example/layer/CGCGatingNetworkLayer_test.py
@@ -0,0 +1,44 @@
#!/usr/bin/env python
# coding: utf-8

# In[2]:


import torch
from gbiz_torch.layer import CGCGatingNetworkLayer


# In[5]:


expert1_output = torch.unsqueeze(torch.randn(8, 10), dim=1)
expert2_output = torch.unsqueeze(torch.randn(8, 10), dim=1)
expert3_output = torch.unsqueeze(torch.randn(8, 10), dim=1)

expert4_output = torch.unsqueeze(torch.randn(8, 10), dim=1)
expert5_output = torch.unsqueeze(torch.randn(8, 10), dim=1)

task_expert_input = torch.cat((expert1_output, expert2_output, expert3_output), dim=1)
shared_expert_input = torch.cat((expert4_output, expert5_output), dim=1)
# gate input: mean over all five experts (renamed to avoid shadowing the built-in `input`)
combined_input = torch.mean(
    torch.cat((expert1_output, expert2_output, expert3_output,
               expert4_output, expert5_output), dim=1),
    dim=1,
)

task_expert_input.shape, shared_expert_input.shape, combined_input.shape
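

# In[ ]:


# Illustration only: the same input shapes can be built more compactly by stacking
# all five experts once, slicing task-specific vs. shared experts, and averaging
# everything for the gate input (fresh random values, pure torch).
experts = torch.randn(8, 5, 10)        # (batch, total_experts, expert_dim)
task_expert_alt = experts[:, :3, :]    # three task-specific experts
shared_expert_alt = experts[:, 3:, :]  # two shared experts
combined_alt = experts.mean(dim=1)     # averaged gate input, shape (8, 10)
task_expert_alt.shape, shared_expert_alt.shape, combined_alt.shape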


# In[7]:


test_input = (task_expert_input, shared_expert_input, combined_input)
CGCGN_layer = CGCGatingNetworkLayer(in_shape=test_input[-1].shape[-1], total_experts=5)

test_output = CGCGN_layer(test_input)

print(test_output.shape)
print(f"test_output is {test_output}")


# In[8]:


CGCGN_layer

File renamed without changes.
43 changes: 43 additions & 0 deletions example/layer/CoActionLayer_test.py
@@ -0,0 +1,43 @@
#!/usr/bin/env python
# coding: utf-8

# In[11]:


import torch
import torch.nn as nn
from gbiz_torch.layer import CoActionLayer


# In[12]:


input_a = torch.randint(0, 3, (8, 10))
get_emb = nn.Embedding(3, 5)
input_seq = get_emb(input_a)
input_item = torch.randn(8, 85)
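# Shapes for reference: input_seq is the embedded sequence, (8, 10, 5);
# input_item is a flat item-side feature vector, (8, 85). How CoActionLayer
# combines the two is internal to gbiz_torch (descriptive note only).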


# In[13]:


test_input = (input_seq, input_item)
# print(f"test_input is {test_input}")
CoA_layer = CoActionLayer(in_shape_list=[test_input[0].shape[-1], test_input[1].shape[-1]])
test_output = CoA_layer(test_input)

print(test_output.shape)
print(f"test_output is {test_output}")


# In[14]:


CoA_layer


# In[ ]:




File renamed without changes.
41 changes: 41 additions & 0 deletions example/layer/ContextNetBlockLayer_test.py
@@ -0,0 +1,41 @@
#!/usr/bin/env python
# coding: utf-8

# In[2]:


import torch
import torch.nn as nn
from gbiz_torch.layer import ContextNetBlockLayer


# In[3]:


input_a = torch.randint(0, 3, (8, 10))
get_emb = nn.Embedding(3, 5)
input_seq = get_emb(input_a)


# In[4]:


# print(f"test_input is {test_input}")
CNB_layer = ContextNetBlockLayer(fields=input_seq.shape[1], in_shape=input_seq.shape[2])
test_output = CNB_layer(input_seq)

print(test_output.shape)
# print(f"test_output is {test_output}")


# In[5]:


CNB_layer


# In[ ]:




File renamed without changes.
43 changes: 43 additions & 0 deletions example/layer/CrossLayer_test.py
@@ -0,0 +1,43 @@
#!/usr/bin/env python
# coding: utf-8

# In[2]:


import torch
import torch.nn as nn
from gbiz_torch.layer import CrossLayer


# In[3]:


test_input = torch.randn(4, 5)
# padding_idx = 0
# embedding = nn.Embedding(10, 3, padding_idx=padding_idx)
# seq_emb_input = embedding(test_input)


# In[4]:


print(f"test_input.shape is {test_input.shape}")
Cross_layer = CrossLayer(in_shape=test_input.shape[-1], n_layers=3)

test_output = Cross_layer(test_input)

print(f"test_output.shape is {test_output.shape}")
print(f"test_output is {test_output}")


# In[5]:


Cross_layer


# In[ ]:




File renamed without changes.
52 changes: 52 additions & 0 deletions example/layer/CrossStitchLayer_test.py
@@ -0,0 +1,52 @@
#!/usr/bin/env python
# coding: utf-8

# In[3]:


import torch
import torch.nn as nn
from gbiz_torch.layer import CrossStitchLayer


# In[4]:


test_input = []
in_shape_list = []
for i in range(5):
    tmp = torch.randn((4, 5))
    test_input.append(tmp)
    in_shape_list.append(tmp.shape[-1])

print(f"test_input is {test_input}")


# In[5]:


CrossStitch_layer = CrossStitchLayer(in_shape_list=in_shape_list)

test_output = CrossStitch_layer(test_input)

# print(f"test_output.shape is {test_output.shape}")
print(f"test_output is {test_output}")


# In[6]:


CrossStitch_layer


# In[9]:


# stack the list of per-task outputs into one tensor to inspect the combined shape
torch.stack(test_output).shape


# In[ ]:




File renamed without changes.
44 changes: 44 additions & 0 deletions example/layer/DCAPLayer_test.py
@@ -0,0 +1,44 @@
#!/usr/bin/env python
# coding: utf-8

# In[2]:


import torch
import torch.nn as nn
from gbiz_torch.layer import DCAPLayer


# In[3]:


test_input = torch.randint(0, 3, (8, 5))
get_emb = nn.Embedding(3, 16)
seq_input = get_emb(test_input)
seq_input.shape


# ### without mask

# In[12]:


DCAP_layer = DCAPLayer(fields=3, in_shape=seq_input.shape[-1])

test_output = DCAP_layer(seq_input)

print(f"test_output.shape is {test_output.shape}")
print(f"test_output is {test_output}")


# In[5]:


DCAP_layer


# In[ ]:




File renamed without changes.
34 changes: 34 additions & 0 deletions example/layer/DNNLayer_test.py
@@ -0,0 +1,34 @@
#!/usr/bin/env python
# coding: utf-8

# In[10]:


import torch
from gbiz_torch.layer import DNNLayer


# In[11]:


test_input = torch.randn((16, 5))
print(f"test_input is {test_input}")
dnn_layer = DNNLayer(
    in_shape=test_input.shape[-1],
    hidden_units=[10, 8, 1],
    activation='leaky_relu',
    dropout_rate=0.3,
    use_bn=True,
    l2_reg=0.9,
)

test_output = dnn_layer(test_input)

print(test_output.shape)
print(f"test_output is {test_output}")


# In[12]:


dnn_layer


# In[ ]:




File renamed without changes.