
remove rng_state fill
Seventeen17 committed Nov 19, 2024
Commit 72e913c (1 parent: 7ad0227)
Showing 1 changed file with 0 additions and 1 deletion.
torch_xla/csrc/ops/flash_attention_forward.cpp (1 change: 0 additions & 1 deletion)
@@ -250,7 +250,6 @@ void custom_call_flash_attention_forward(cudaStream_t stream, void** buffers,
       torch::from_blob(buffers[5 + buf_offset], {2}, opts.dtype(torch::kInt64));
   // Forward kernel will populate memory with the seed and offset.
   launch_params.rng_state = reinterpret_cast<uint64_t*>(rng_state.data_ptr());
-  cudaMemsetAsync(rng_state.data_ptr(), 0, 2 * sizeof(int64_t), cuda_stream);
 
   if ((1.f - launch_params.p_dropout) > 0.0) {
     // number of times random will be generated per thread, to offset philox
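The removed line had zeroed the two int64 slots of rng_state on the CUDA stream before the kernel launch; as the comment in the diff notes, the forward kernel writes the Philox seed and offset into that buffer itself, so the explicit pre-fill is presumably redundant. A minimal sketch of the binding pattern that remains after this commit follows; the struct and function names here are illustrative assumptions, not the actual torch_xla code.

#include <cstdint>
#include <torch/torch.h>

// Illustrative stand-in for the flash-attention launch parameters.
struct LaunchParamsSketch {
  float p_dropout = 0.1f;         // assumed dropout probability
  uint64_t* rng_state = nullptr;  // kernel writes {seed, offset} here
};

// Wrap the raw XLA buffer as a 2-element int64 tensor and point the
// launch params at it, mirroring the code kept by this commit: no
// cudaMemsetAsync zero-fill is issued before the kernel launch.
void bind_rng_state_sketch(void* raw_buffer, LaunchParamsSketch& launch_params) {
  auto opts = torch::TensorOptions().device(torch::kCUDA);
  at::Tensor rng_state =
      torch::from_blob(raw_buffer, {2}, opts.dtype(torch::kInt64));
  launch_params.rng_state = reinterpret_cast<uint64_t*>(rng_state.data_ptr());
}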
