text stringlengths 1 93.6k |
|---|
# Routing algorithm
|
# Calculate routing or also known as coupling coefficients (c_ij).
|
# c_ij shape: [1, 1152, 10, 1]
|
c_ij = F.softmax(b_ij, dim=2) # Convert routing logits (b_ij) to softmax.
|
# c_ij shape from: [128, 1152, 10, 1] to: [128, 1152, 10, 1, 1]
|
c_ij = torch.cat([c_ij] * batch_size, dim=0).unsqueeze(4)
|
# Implement equation 2 in the paper.
|
# s_j is the total input to capsule j, a weighted sum over all "prediction vectors".
|
# u_hat holds the weighted inputs: the prediction vector û_j|i made by capsule i.
|
# c_ij * u_hat shape: [128, 1152, 10, 16, 1]
|
# s_j output shape: [batch_size=128, 1, 10, 16, 1]
|
# Sum of Primary Capsules outputs, 1152D becomes 1D.
|
s_j = (c_ij * u_hat).sum(dim=1, keepdim=True)
|
# Squash the vector output of capsule j.
|
# v_j shape: [batch_size, weighted sum of PrimaryCaps output,
|
# num_classes, output_unit_size from u_hat, 1]
|
# == [128, 1, 10, 16, 1]
|
# So, the length of the output vector of a capsule is 16, which is in dim 3.
|
v_j = utils.squash(s_j, dim=3)
|
# in_channel is 1152.
|
# v_j1 shape: [128, 1152, 10, 16, 1]
|
v_j1 = torch.cat([v_j] * self.in_channel, dim=1)
|
# The agreement.
|
# Transpose u_hat with shape [128, 1152, 10, 16, 1] to [128, 1152, 10, 1, 16],
|
# so we can do matrix product u_hat and v_j1.
|
# u_vj1 shape: [1, 1152, 10, 1]
|
u_vj1 = torch.matmul(u_hat.transpose(3, 4), v_j1).squeeze(4).mean(dim=0, keepdim=True)
|
# Update routing (b_ij) by adding the agreement to the initial logit.
|
b_ij = b_ij + u_vj1
|
return v_j.squeeze(1) # shape: [128, 10, 16, 1]
|
def no_routing(self, x):
    """
    Compute capsule outputs without dynamic routing (the PrimaryCaps path).

    Applies every convolutional unit to the same input, stacks the
    results, flattens the spatial grid, and squashes along the flattened
    dimension.

    :param x: input feature map of shape (batch, channels, height, width),
              e.g. [128, 256, 20, 20] — assumed from the example shapes in
              the comments; confirm against the caller.
    :return: squashed tensor of shape [batch_size, num_unit, -1],
             e.g. [128, 8, 1152].
    """
    # Apply each convolutional unit (a conv layer acting as one capsule
    # dimension) to the same input. Each output is e.g. [128, 32, 6, 6].
    # Iterate the modules directly — the original indexed via enumerate
    # and left the loop variable unused.
    unit = [conv(x) for conv in self.conv_units]

    # Stack the num_unit outputs along a new dim 1: [128, 8, 32, 6, 6].
    unit = torch.stack(unit, dim=1)

    batch_size = x.size(0)

    # Flatten the 32 grids of 6x6 into a single 1152-D axis: [128, 8, 1152].
    unit = unit.view(batch_size, self.num_unit, -1)

    # Add the squash non-linearity along dim 2 (the 1152-D axis) so each
    # capsule's output vector has length in [0, 1).
    return utils.squash(unit, dim=2)
|
# <FILESEP>
|
import sqlite3
import os
import json

# Load the database path from the project config.
# (Use a context manager — the original left the file handle open.)
with open('src/config.json') as f:
    config = json.load(f)

database = config['database']

# If a database file already exists, confirm before destroying it.
if os.path.exists(database):
    confirm = input('Database already exists. Do you want to delete it and create a new one? (y/[N]): ')
    # Accept 'y'/'Y' with stray whitespace; anything else aborts.
    if confirm.strip().lower() == 'y':
        os.remove(database)
        print('[-] Database already exists. Deleting it.')
    else:
        print('[-] Exiting.')
        exit()

conn = sqlite3.connect(database)
print('[+] Database opened successfully.')

# NOTE: SQLite has no STRING type — a "STRING" column gets NUMERIC
# affinity and can silently coerce date strings, so use TEXT instead.
conn.execute('''CREATE TABLE users
             (UserId INTEGER PRIMARY KEY,
              date TEXT NOT NULL
             );''')
print('[+] Table users created successfully.')

# NOTE(review): githubId is TEXT but defaults to the number 0 — looks
# intentional (sentinel for "not linked"); confirm before changing.
conn.execute('''CREATE TABLE settings
             (ownerId INTEGER PRIMARY KEY,
              language TEXT DEFAULT "english",
              playlist TEXT DEFAULT "m3u",
              githubId TEXT DEFAULT 0,
              totalRefer INTEGER DEFAULT 0,
              defaultAcId INTEGER
             );''')
print('[+] Table settings created successfully.')

# Persist the DDL and release the connection (the original leaked it).
conn.commit()
conn.close()
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.