1156 downsampling experiment #1165

Merged: 8 commits, Aug 18, 2022
16 changes: 16 additions & 0 deletions adam/curriculum/curriculum_from_files.py
@@ -30,6 +30,14 @@
"m5_objects_v0_with_mugs",
"m5_actions",
"m5_actions_person_only",
"m6_objects_downsampled_2pertype_post_gnn",
"m6_objects_downsampled_10pertype_post_gnn",
"m5_objects_v0_with_mugs_post_gnn",
"m6_unknown_objects",
"m6_objects_v0_with_mugs_post_gnn_top2",
"m6_objects_v0_with_mugs_post_gnn_top3",
"m6_objects_v0_with_mugs_post_gnn_top4",
"m6_objects_v0_with_mugs_post_gnn_top5",
]

PHASE_3_TESTING_CURRICULUM_OPTIONS = [
@@ -40,6 +48,14 @@
"m5_objects_v0_with_mugs_eval",
"m5_actions_eval",
"m5_actions_person_only_eval",
"m5_objects_v0_with_mugs_eval_2pertype_post_gnn",
"m5_objects_v0_with_mugs_eval_10pertype_post_gnn",
"m5_objects_v0_with_mugs_eval_post_gnn",
"m6_unknown_objects_eval",
"m6_objects_v0_with_mugs_eval_post_gnn_top2",
"m6_objects_v0_with_mugs_eval_post_gnn_top3",
"m6_objects_v0_with_mugs_eval_post_gnn_top4",
"m6_objects_v0_with_mugs_eval_post_gnn_top5",
]

TRAINING_CUR = "training"
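The new downsampled and top-k curriculum names only take effect once they are registered in these option lists. A minimal sketch of how a requested curriculum name might be validated against the training options before anything is read from disk; the helper name and directory layout here are assumptions, not the repository's actual API.

# Illustrative only: check a requested name against the options above before
# resolving it to a curriculum directory. Helper name and layout are assumed.
from pathlib import Path

def resolve_training_curriculum(name: str, curriculum_root: Path) -> Path:
    if name not in PHASE_3_TRAINING_CURRICULUM_OPTIONS:
        raise ValueError(
            f"Unknown training curriculum '{name}'; "
            f"expected one of {PHASE_3_TRAINING_CURRICULUM_OPTIONS}"
        )
    return curriculum_root / "train" / name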
9 changes: 9 additions & 0 deletions adam/perception/visual_perception.py
@@ -156,6 +156,15 @@ def from_mapping(
weight=strokes_map["confidence_score"],
)
)
if "concept_names" in strokes_map:
for concept_name in strokes_map["concept_names"]:
properties.append(
StrokeGNNRecognitionNode(
object_recognized=concept_name,
confidence=strokes_map["confidence_score"],
weight=strokes_map["confidence_score"],
)
)

# Cluster ID is cleaned so that only the digit ID is displayed and not 'object'
clusters.append(
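The new branch above mirrors the existing single-label path: when a stroke_graph mapping carries a concept_names list (top-k GNN decodes) instead of a single concept_name, one StrokeGNNRecognitionNode property is added per candidate concept, all sharing the same confidence score. A minimal sketch of that expansion, with StrokeGNNRecognitionNode reduced to the three fields used above (the real class may carry more) and made-up concept labels:

from dataclasses import dataclass

@dataclass(frozen=True)
class StrokeGNNRecognitionNode:  # simplified stand-in for the real node class
    object_recognized: str
    confidence: float
    weight: float

strokes_map = {"concept_names": ["mug", "cup", "bowl"], "confidence_score": 0.87}

properties = []
if "concept_names" in strokes_map:
    for concept_name in strokes_map["concept_names"]:
        properties.append(
            StrokeGNNRecognitionNode(
                object_recognized=concept_name,
                confidence=strokes_map["confidence_score"],
                weight=strokes_map["confidence_score"],
            )
        )
# len(properties) == 3: one recognition property per top-k candidate.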
26 changes: 20 additions & 6 deletions adam_preprocessing/shape_stroke_graph_inference.py
@@ -4,6 +4,7 @@
from copy import deepcopy
import logging
from pathlib import Path
from typing import Sequence

import torch
import torch.nn as nn
@@ -22,12 +23,19 @@
from utils import accuracy, get_stroke_data, LinearModel, load_data, STRING_OBJECT_LABELS


def update_features_yaml(features_yaml, *, predicted_object: str):
def update_features_yaml(features_yaml, *, predicted_objects: Sequence[str]):
# jac: not terribly efficient to do a full deepcopy, but who cares, this should be a small dict
# anyway... also this is probably much less expensive than the GNN inference itself.
result = deepcopy(features_yaml)
for object_ in result["objects"]:
object_["stroke_graph"]["concept_name"] = predicted_object
if len(predicted_objects) == 1:
object_["stroke_graph"]["concept_name"] = predicted_objects[0]
if "concept_names" in object_["stroke_graph"]:
del object_["stroke_graph"]["concept_names"]
else:
object_["stroke_graph"]["concept_names"] = predicted_objects
if "concept_name" in object_["stroke_graph"]:
del object_["stroke_graph"]["concept_name"]
return result
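A usage sketch of the updated function: a single prediction keeps the legacy scalar concept_name key, while multiple predictions switch to a concept_names list and drop the scalar, so only one of the two keys is ever present. The input dict below is illustrative; real feature YAMLs carry more keys per stroke_graph.

features_yaml = {"objects": [{"stroke_graph": {"concept_name": "stale_label"}}]}

single = update_features_yaml(features_yaml, predicted_objects=["mug"])
# single["objects"][0]["stroke_graph"] == {"concept_name": "mug"}

top3 = update_features_yaml(features_yaml, predicted_objects=["mug", "cup", "bowl"])
# top3["objects"][0]["stroke_graph"] == {"concept_names": ["mug", "cup", "bowl"]}
# The original features_yaml is untouched because the function deep-copies its input.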


@@ -50,6 +58,12 @@ def main():
help="Directory where we should write the feature outputs to. Outputs are structured as if "
"the output path is a curriculum directory.",
)
parser.add_argument(
"--top_k",
type=int,
default=1,
help="Top k decodes to retrieve from the GNN",
)
args = parser.parse_args()
logging.basicConfig(level=logging.INFO)

@@ -129,22 +143,22 @@ def main():

assert outputs.size(0) == curriculum_params["num_dirs"]

predicted_label_ints = outputs.argmax(dim=1)
_, predicted_label_ints = outputs.topk(args.top_k)
n_saved = 0
for situation_num in range(curriculum_params["num_dirs"]):
input_situation_dir = args.curriculum_path / f"situation_{situation_num}"
feature_yamls = sorted(input_situation_dir.glob("feature*"))
if len(feature_yamls) == 1:
# Load features, update them, then save
with open(
input_situation_dir / feature_yamls[0], encoding="utf-8"
feature_yamls[0], encoding="utf-8"
) as feature_yaml_in:
features = yaml.safe_load(feature_yaml_in)

updated_features = update_features_yaml(
features,
predicted_object=STRING_OBJECT_LABELS[
predicted_label_ints[situation_num]
predicted_objects=[STRING_OBJECT_LABELS[
predicted_label_ints[situation_num][i]] for i in range(args.top_k)
],
)

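With the argmax call replaced by topk, predicted_label_ints changes from a 1-D tensor of best labels to a (num_situations, top_k) tensor of label indices ordered best-first, and the comprehension above maps each row back to label strings. A small sketch with dummy logits and a stand-in label list:

import torch

STRING_OBJECT_LABELS = ["mug", "cup", "bowl", "plate", "box", "ball"]  # stand-in
outputs = torch.randn(4, 6)  # (num_situations, num_classes) dummy logits
top_k = 3

_, predicted_label_ints = outputs.topk(top_k)  # indices, shape (4, 3), best first
assert predicted_label_ints.shape == (4, top_k)

situation_num = 0
predicted_objects = [
    STRING_OBJECT_LABELS[predicted_label_ints[situation_num][i]] for i in range(top_k)
]
print(predicted_objects)  # e.g. ['bowl', 'mug', 'plate']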
3 changes: 1 addition & 2 deletions adam_preprocessing/shape_stroke_graph_learner.py
@@ -161,8 +161,7 @@ def main():
logging.info(
"{}, loss: {:.4e}, acc: {}, test acc :{}".format(
step, train_loss.item(), acc, test_acc
),
flush=True,
)
)
logging.info("Best test acc is {}".format(best_acc))
logging.info(f"Saving model state dict to {args.save_model_to}")
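For context on the learner-script change: logging.info() does not accept print()'s flush keyword, so the stale kwarg (apparently left over from an earlier print-style call) would raise a TypeError at runtime and is simply dropped.

import logging

logging.basicConfig(level=logging.INFO)
logging.info("{}, loss: {:.4e}".format(0, 1.234e-2))  # fine
# logging.info("...", flush=True)  # TypeError: unexpected keyword argument 'flush'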
2 changes: 1 addition & 1 deletion adam_preprocessing/utils.py
@@ -66,7 +66,7 @@ def get_stroke_data(
feature_yamls = sorted(situation_dir.glob("feature*"))
if len(feature_yamls) == 1:
with open(
situation_dir / feature_yamls[0], encoding="utf-8"
feature_yamls[0], encoding="utf-8"
) as feature_yaml_in:
features = yaml.safe_load(feature_yaml_in)

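The utils.py change is the same fix applied to the inference script above: Path.glob already yields paths that include the directory it was called on, so re-prefixing with situation_dir duplicates the directory component when paths are relative (with absolute paths, pathlib silently discards the left operand, which is why the old code could still appear to work). A small sketch, assuming the directory layout shown in the comments:

from pathlib import Path

situation_dir = Path("curriculum/situation_0")
feature_yamls = sorted(situation_dir.glob("feature*"))
# feature_yamls[0] == Path("curriculum/situation_0/feature_0.yaml")  (already prefixed)
# situation_dir / feature_yamls[0]
#   == Path("curriculum/situation_0/curriculum/situation_0/feature_0.yaml")  (doubled)
with open(feature_yamls[0], encoding="utf-8") as feature_yaml_in:
    features_text = feature_yaml_in.read()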
@@ -0,0 +1,60 @@
_includes:
- "../../root.params"

# Learner Configuration
learner: simulated-integrated-learner-params
object_learner:
learner_type: "subset"
ontology: "phase3"
min_continuous_feature_match_score: 0.05
attribute_learner:
learner_type: "none"
relation_learner:
learner_type: "none"
action_learner:
learner_type: "none"
plural_learner:
learner_type: "none"
affordance_learner:
learner_type: "none"
include_functional_learner: false
include_generics_learner: false
suppress_error: false

# Curriculum Configuration
curriculum: "phase3"
train_curriculum:
curriculum_type: "training"
curriculum: "m5_objects_v0_with_mugs_post_gnn"
color_is_rgb: True
test_curriculum:
curriculum_type: "testing"
curriculum: "m5_objects_v0_with_mugs_eval_post_gnn"
color_is_rgb: True

# Experiment Configuration
experiment: "m5_objects_v0_with_mugs_full"
experiment_group_dir: '%adam_experiment_root%/learners/%learner%/experiments/%train_curriculum.curriculum%/'
log_learner_state: true
experiment_type: simulated

# Hypothesis Logging
hypothesis_log_dir: "%experiment_group_dir%/hypotheses"
log_hypothesis_every_n_steps: 250

# Debug Configuration
debug_log_directory: "%experiment_group_dir%/graphs"
debug_perception_log_dir: "%experiment_group_dir%/perception_graphs"

# Observer Params
post_observer:
experiment_output_path: "%experiment_group_dir%"
copy_curriculum: true
file_name: "post_decode"

test_observer:
experiment_output_path: "%post_observer.experiment_output_path%/test_curriculums/%test_curriculum.curriculum%/"
copy_curriculum: true
file_name: "post_decode"
calculate_accuracy_by_language: true
calculate_overall_accuracy: true
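The four parameter files added by this PR differ only in which train/test curriculum pair and experiment name they set; the learner, logging, and observer sections are identical. The %key% references are resolved by the repository's parameter loader; the toy resolver below is purely illustrative (flat string keys stand in for the real nested structure, and the _includes mechanism is omitted) and only shows how a nested reference such as hypothesis_log_dir expands.

import re
from typing import Dict

def resolve_placeholders(flat_params: Dict[str, str]) -> Dict[str, str]:
    """Toy %key% substitution; NOT the repository's actual parameter loader."""
    pattern = re.compile(r"%([^%]+)%")
    resolved = dict(flat_params)
    for _ in range(5):  # a few passes cover the shallow nesting in these files
        resolved = {
            key: pattern.sub(lambda m: resolved[m.group(1)], value)
            for key, value in resolved.items()
        }
    return resolved

flat_params = {
    "adam_experiment_root": "/data/adam",  # assumed value from root.params
    "learner": "simulated-integrated-learner-params",
    "train_curriculum.curriculum": "m5_objects_v0_with_mugs_post_gnn",
    "experiment_group_dir": "%adam_experiment_root%/learners/%learner%/experiments/%train_curriculum.curriculum%",
    "hypothesis_log_dir": "%experiment_group_dir%/hypotheses",
}
print(resolve_placeholders(flat_params)["hypothesis_log_dir"])
# /data/adam/learners/simulated-integrated-learner-params/experiments/m5_objects_v0_with_mugs_post_gnn/hypotheses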
@@ -0,0 +1,60 @@
_includes:
- "../../root.params"

# Learner Configuration
learner: simulated-integrated-learner-params
object_learner:
learner_type: "subset"
ontology: "phase3"
min_continuous_feature_match_score: 0.05
attribute_learner:
learner_type: "none"
relation_learner:
learner_type: "none"
action_learner:
learner_type: "none"
plural_learner:
learner_type: "none"
affordance_learner:
learner_type: "none"
include_functional_learner: false
include_generics_learner: false
suppress_error: false

# Curriculum Configuration
curriculum: "phase3"
train_curriculum:
curriculum_type: "training"
curriculum: "m6_objects_downsampled_10pertype_post_gnn"
color_is_rgb: True
test_curriculum:
curriculum_type: "testing"
curriculum: "m5_objects_v0_with_mugs_eval_10pertype_post_gnn"
color_is_rgb: True

# Experiment Configuration
experiment: "m6_objects_downsampled_10pertype"
experiment_group_dir: '%adam_experiment_root%/learners/%learner%/experiments/%train_curriculum.curriculum%/'
log_learner_state: true
experiment_type: simulated

# Hypothesis Logging
hypothesis_log_dir: "%experiment_group_dir%/hypotheses"
log_hypothesis_every_n_steps: 250

# Debug Configuration
debug_log_directory: "%experiment_group_dir%/graphs"
debug_perception_log_dir: "%experiment_group_dir%/perception_graphs"

# Observer Params
post_observer:
experiment_output_path: "%experiment_group_dir%"
copy_curriculum: true
file_name: "post_decode"

test_observer:
experiment_output_path: "%post_observer.experiment_output_path%/test_curriculums/%test_curriculum.curriculum%/"
copy_curriculum: true
file_name: "post_decode"
calculate_accuracy_by_language: true
calculate_overall_accuracy: true
@@ -0,0 +1,60 @@
_includes:
- "../../root.params"

# Learner Configuration
learner: simulated-integrated-learner-params
object_learner:
learner_type: "subset"
ontology: "phase3"
min_continuous_feature_match_score: 0.05
attribute_learner:
learner_type: "none"
relation_learner:
learner_type: "none"
action_learner:
learner_type: "none"
plural_learner:
learner_type: "none"
affordance_learner:
learner_type: "none"
include_functional_learner: false
include_generics_learner: false
suppress_error: false

# Curriculum Configuration
curriculum: "phase3"
train_curriculum:
curriculum_type: "training"
curriculum: "m6_objects_downsampled_2pertype_post_gnn"
color_is_rgb: True
test_curriculum:
curriculum_type: "testing"
curriculum: "m5_objects_v0_with_mugs_eval_2pertype_post_gnn"
color_is_rgb: True

# Experiment Configuration
experiment: "m6_objects_downsampled_2pertype"
experiment_group_dir: '%adam_experiment_root%/learners/%learner%/experiments/%train_curriculum.curriculum%/'
log_learner_state: true
experiment_type: simulated

# Hypothesis Logging
hypothesis_log_dir: "%experiment_group_dir%/hypotheses"
log_hypothesis_every_n_steps: 250

# Debug Configuration
debug_log_directory: "%experiment_group_dir%/graphs"
debug_perception_log_dir: "%experiment_group_dir%/perception_graphs"

# Observer Params
post_observer:
experiment_output_path: "%experiment_group_dir%"
copy_curriculum: true
file_name: "post_decode"

test_observer:
experiment_output_path: "%post_observer.experiment_output_path%/test_curriculums/%test_curriculum.curriculum%/"
copy_curriculum: true
file_name: "post_decode"
calculate_accuracy_by_language: true
calculate_overall_accuracy: true
60 changes: 60 additions & 0 deletions parameters/experiments/p3/m6_objects_v0_with_mugs_top2.params
@@ -0,0 +1,60 @@
_includes:
- "../../root.params"

# Learner Configuration
learner: simulated-integrated-learner-params
object_learner:
learner_type: "subset"
ontology: "phase3"
min_continuous_feature_match_score: 0.05
attribute_learner:
learner_type: "none"
relation_learner:
learner_type: "none"
action_learner:
learner_type: "none"
plural_learner:
learner_type: "none"
affordance_learner:
learner_type: "none"
include_functional_learner: false
include_generics_learner: false
suppress_error: false

# Curriculum Configuration
curriculum: "phase3"
train_curriculum:
curriculum_type: "training"
curriculum: "m6_objects_v0_with_mugs_post_gnn_top2"
color_is_rgb: True
test_curriculum:
curriculum_type: "testing"
curriculum: "m6_objects_v0_with_mugs_eval_post_gnn_top2"
color_is_rgb: True

# Experiment Configuration
experiment: "m6_unknown_objects"
experiment_group_dir: '%adam_experiment_root%/learners/%learner%/experiments/%train_curriculum.curriculum%/'
log_learner_state: true
experiment_type: simulated

# Hypothesis Logging
hypothesis_log_dir: "%experiment_group_dir%/hypotheses"
log_hypothesis_every_n_steps: 250

# Debug Configuration
debug_log_directory: "%experiment_group_dir%/graphs"
debug_perception_log_dir: "%experiment_group_dir%/perception_graphs"

# Observer Params
post_observer:
experiment_output_path: "%experiment_group_dir%"
copy_curriculum: true
file_name: "post_decode"

test_observer:
experiment_output_path: "%post_observer.experiment_output_path%/test_curriculums/%test_curriculum.curriculum%/"
copy_curriculum: true
file_name: "post_decode"
calculate_accuracy_by_language: true
calculate_overall_accuracy: true