parser.add_argument("--expr_nb_variables", type=int, default=5)
-parser.add_argument("--expr_sequence_length", type=int, default=30)
+parser.add_argument("--expr_sequence_length", type=int, default=40)
+
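+# Optional: when set for the "expr" task, skip training and only produce
+# results for the contents of this file (see the early-exit block below)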
+parser.add_argument("--expr_input_file", type=str, default=None)
######################################################################
######################################################################
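+# Evaluation-only path: if an input file was given for the "expr" task,
+# produce results for it and exit before any training happens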
+if args.task == "expr" and args.expr_input_file is not None:
+ task.produce_results(
+ nb_epochs_finished,
+ model,
+ args.result_dir,
+ log_string,
+ args.deterministic_synthesis,
+ args.expr_input_file,
+ )
+
+ exit(0)
+
+######################################################################
+
nb_epochs = args.nb_epochs if args.nb_epochs > 0 else nb_epochs_default
-# Compute the entropy of the training tokens
+# Check how much of the test set also appears in the train set, using the
+# token sum of each sequence as a cheap hash key
train_examples = {}
+
for input in task.batches(split="train"):
assert input.dim() == 2 and input.dtype == torch.int64
for x in input:
train_examples[x.sum().item()] = x
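+# Count how many test samples exactly match a stored train sample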
+nb_total, nb_collisions = 0, 0
for input in task.batches(split="test"):
assert input.dim() == 2 and input.dtype == torch.int64
for x in input:
+ nb_total += 1
y = train_examples.get(x.sum().item())
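+        # an exact duplicate of a train sample counts as a collision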
if y is not None:
- assert x.size() != y.size() or (x - y).abs().sum() > 0
+ if x.size() == y.size() and (x - y).abs().sum() == 0:
+ nb_collisions += 1
del train_examples
+log_string(
+    f"data_check {nb_collisions*100/nb_total:.2f}% ({nb_collisions}/{nb_total}) of test samples are in the train set"
+)
+
##############################
if args.learning_rate_schedule == "cos":