# --- page-extraction residue from the repository web UI (not part of the script) ---
# You can not select more than 25 topics Topics must start with a letter or number, can include dashes ('-') and can be up to 35 characters long.
# 26 lines
# 616 B

"""Model/hyperparameter configuration for a transformer-style model.

Loads two vocabularies from disk and defines the dimensions used elsewhere.
Importing this module has side effects: it reads two JSON files and prints
CUDA availability.
"""
import json

import torch

# Token vocabulary; only its length is used here to size the embedding table.
# NOTE(review): presumably a token -> id mapping — confirm against the tokenizer.
with open('vocab/vocabulary.json', 'r', encoding='utf-8') as f:
    vocab = json.load(f)
vocab_size = len(vocab)

# Secondary ("small") vocabulary — only its size is needed.
# NOTE(review): semantics of sm_voc.json not visible here — verify against caller.
with open('vocab/sm_voc.json', 'r', encoding='utf-8') as f:
    sm_size = len(json.load(f))

batch_size = 1
input_dim = 1024
embedding_dim = input_dim        # embedding width matches model width
dropout = 0.1
n_head = 32                      # attention heads
Encoder_n_layers = 1
d_ff1 = input_dim * 4            # feed-forward expansion (conventional 4x)
d_ff2 = input_dim * 4
Decoder_n_layer = 0              # NOTE(review): 0 decoder layers — confirm intentional
hidden_dim = input_dim
seq_len = 30
kernal_size = seq_len * 2        # sic: "kernal" spelling kept so existing importers keep working
stride = 2
bias = True

# Prefer GPU when available; fall back to CPU.
device = torch.device('cuda' if torch.cuda.is_available() else 'cpu')
print(f'cuda available:{torch.cuda.is_available()}')