PackedLayer Class — PyTorch Architecture
Architecture documentation for the PackedLayer class in RNN.cpp from the PyTorch codebase.
Entity Profile
Source Code
aten/src/ATen/native/RNN.cpp lines 933–992
// Runs a single RNN layer over a PackedSequence (variable-length batch packed
// into one flat tensor, with per-step batch sizes). Produces the packed
// per-step outputs plus the final hidden state for every sequence.
template<typename hidden_type, typename cell_params>
struct PackedLayer : Layer<PackedSequence, hidden_type, cell_params> {
  using output_type =
      typename Layer<PackedSequence, hidden_type, cell_params>::output_type;
  // Non-owning: the cell must outlive this layer.
  PackedLayer(Cell<hidden_type, cell_params>& cell)
    : cell_(cell) {}

  // input:        packed data plus decreasing per-step batch sizes.
  // input_hidden: initial hidden state sized for the largest (first) step.
  // params:       cell weights; also provides the input-to-hidden projection.
  // Returns {packed step outputs, concatenated final hidden states}.
  output_type operator()(
      const PackedSequence& input,
      const hidden_type& input_hidden,
      const cell_params& params) const override {
    std::vector<at::Tensor> step_outputs;
    std::vector<hidden_type> hiddens;
    int64_t input_offset = 0;
    int64_t num_steps = input.batch_sizes.size(0);
    const int64_t* batch_sizes = input.batch_sizes.const_data_ptr<int64_t>();
    int64_t last_batch_size = batch_sizes[0];
    // Exactly one output per time step — reserve up front so the hot loop
    // below never reallocates.
    step_outputs.reserve(num_steps);

    const Tensor* input_ptr = &input.data;
    bool pre_compute_input = false;
    Tensor input_w;
    if (input.data.device().is_cpu()) {
      // On CPU it is cheaper to apply the input-to-hidden projection to the
      // entire packed input once than once per step inside the cell; the
      // cell is told via pre_compute_input to skip its own projection.
      input_w = params.linear_ih(input.data);
      input_ptr = &input_w;
      pre_compute_input = true;
    }

    // Batch sizes is a sequence of decreasing lengths, which are offsets
    // into a 1D list of inputs. At every step we slice out batch_size elements,
    // and possibly account for the decrease in the batch size since the last step,
    // which requires us to slice the hidden state (since some sequences
    // are completed now). The sliced parts are also saved, because we will need
    // to return a tensor of final hidden state.
    auto hidden = input_hidden;
    for (const auto i : c10::irange(num_steps)) {
      const int64_t batch_size = batch_sizes[i];
      auto step_input = input_ptr->narrow(0, input_offset, batch_size);
      input_offset += batch_size;
      const int64_t dec = last_batch_size - batch_size;
      if (dec > 0) {
        // The trailing `dec` sequences just finished: save their (final)
        // hidden slice, and keep processing only the still-active prefix.
        hiddens.emplace_back(
            hidden_slice(hidden, last_batch_size - dec, last_batch_size));
        hidden = hidden_slice(hidden, 0, last_batch_size - dec);
      }
      last_batch_size = batch_size;
      hidden = cell_(step_input, hidden, params, pre_compute_input);
      step_outputs.push_back(hidden_as_output(hidden));
    }
    // `hidden` is not used past this point — move it into the result.
    hiddens.emplace_back(std::move(hidden));
    // Finished-sequence slices were collected longest-first; reverse so that
    // hidden_concat restores the original batch order.
    std::reverse(hiddens.begin(), hiddens.end());
    return {PackedSequence{at::cat(step_outputs, 0), input.batch_sizes},
            hidden_concat(hiddens)};
  }

  Cell<hidden_type, cell_params>& cell_;
};
Source
Analyze Your Own Codebase
Get architecture documentation, dependency graphs, and domain analysis for your codebase in minutes.
Try Supermodel Free