llm.c/test_gpt2_fp32.cu at master · karpathy/llm.c (github.com)
Source code
// ----------------------------------------------------------------------------
// main training loop
int main(int argc, char *argv[]) {
// read in the (optional) command line arguments
const char* input_dataset_prefix = "data/tiny_shakespeare"; // or e.g. data/TinyStories
const char* output_log_file = NULL;
int B = 4; // batch size
int T = 1024; // sequence length max
float learning_rate = 3e-4f;
int val_loss_every = 20; // every how many steps do we eval validation