Merge pull request #204 from tritonuas/fix/pipeline-crashes
fix pipeline crashes
atar13 authored Jun 24, 2024
2 parents 0750818 + 303ed72 commit f581b6d
Showing 16 changed files with 205 additions and 92 deletions.
3 changes: 0 additions & 3 deletions .gitignore
@@ -23,9 +23,6 @@ images/*

tests/integration/images/*
!tests/integration/images/.gitkeep
!tests/integration/images/saliency/
!tests/integration/images/saliency/.gitkeep
tests/integration/images/saliency/*.jpg

.vscode/*
!.vscode/c_cpp_properties.json
16 changes: 10 additions & 6 deletions CMakeLists.txt
@@ -153,35 +153,39 @@ add_custom_target(pull_models
# Saliency model
add_custom_target(pull_saliency
COMMAND gdown 1S1IfXlGs_pCH49DwZmbD-tZA5YH0A1gx -O ${CMAKE_BINARY_DIR}/../models/torchscript_19.pth
USES_TERMINAL
)

# Matching model
add_custom_target(pull_matching
COMMAND gdown 1NeFiAfSSLXAZWlehfd0ox7p_jFF4YdrO -O ${CMAKE_BINARY_DIR}/../models/target_siamese_1.pt
USES_TERMINAL
)


# Segmentation model
add_custom_target(pull_segmentation
COMMAND gdown 1U2EbfJFzcjVnjTuD6ud-bIf8YOiEassf -O ${CMAKE_BINARY_DIR}/../models/fcn-model_20-epochs_06-01-2023T21-16-02.pth
USES_TERMINAL
)
# =============================

# =============================
# Pull testing images
add_custom_target(pull_test_images
DEPENDS pull_matching_test_images pull_saliency_test_images
)

# pull cropped images from fraternal_targets testing folder
add_custom_target(pull_matching_test_images
COMMAND gdown 1opxdXw75jSQZu9s61njE6hjQkfHiKvgp -O ${CMAKE_BINARY_DIR}/../tests/integration/images/test.zip &&
mkdir -p ${CMAKE_BINARY_DIR}/../tests/integration/images/matching_cropped &&
unzip ${CMAKE_BINARY_DIR}/../tests/integration/images/test.zip -d ${CMAKE_BINARY_DIR}/../tests/integration/images/matching_cropped
COMMAND gdown 1vmP3HUS1SyqhdtJrP4QuFpGbyoyfaYSe --folder -O ${CMAKE_BINARY_DIR}/../tests/integration/images/matching
USES_TERMINAL
)

# pull cropped images from saliency images testing folder
add_custom_target(pull_saliency_test_images
COMMAND gdown 1HJLdrm0X3VxnlQ3Z58EE9-Y5VX1NlhAz -O ${CMAKE_BINARY_DIR}/../tests/integration/images/saliency.zip &&
mkdir -p ${CMAKE_BINARY_DIR}/../tests/integration/images/saliency &&
unzip ${CMAKE_BINARY_DIR}/../tests/integration/images/saliency.zip -d ${CMAKE_BINARY_DIR}/../tests/integration/images/saliency
COMMAND gdown 1JvtQUroZJHo51E37_IA2D1mfdJj2smyR --folder -O ${CMAKE_BINARY_DIR}/../tests/integration/images/saliency
USES_TERMINAL
)

# =============================
2 changes: 1 addition & 1 deletion configs/dev-config.json
@@ -49,7 +49,7 @@
},
"cv": {
"matching_model_dir": "/workspaces/obcpp/models/target_siamese_1.pt",
"segmentation_model_dir": "/workspaces/obcpp/models/fcn.pth",
"segmentation_model_dir": "/workspaces/obcpp/models/fcn-model_20-epochs_06-01-2023T21-16-02.pth",
"saliency_model_dir": "/workspaces/obcpp/models/torchscript_19.pth",
"not_stolen_addr": "localhost",
"not_stolen_port": 5069
8 changes: 7 additions & 1 deletion include/camera/mock.hpp
@@ -6,6 +6,7 @@
#include <shared_mutex>
#include <deque>
#include <vector>
#include <filesystem>

#include "camera/interface.hpp"
#include "network/mavlink.hpp"
@@ -50,7 +51,7 @@ class MockCamera : public CameraInterface {
void startStreaming() override;

private:
std::vector<cv::Mat> mock_images;
std::vector<ImageData> mock_images;

std::atomic_bool isTakingPictures;

@@ -61,6 +62,11 @@ class MockCamera : public CameraInterface {
std::shared_mutex imageQueueLock;

std::thread captureThread;

// Get telemetry from JSON file adjacent to given image file.
// Ex: given path to "0003.jpg", telemetry will be looked for in
// "0003.json"
std::optional<ImageTelemetry> getTelemetryFromJsonFile(std::filesystem::path img_path);
};

#endif // INCLUDE_CAMERA_MOCK_HPP_
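
For reference, a minimal standalone sketch (not part of this diff) of the sidecar-filename convention documented in the MockCamera header above; the sample path is made up, and the behavior relies only on std::filesystem::path::replace_extension, the same call the implementation uses.

#include <filesystem>
#include <iostream>

int main() {
    // hypothetical mock image path; only the extension swap matters here
    std::filesystem::path img_path{"tests/integration/images/0003.jpg"};
    std::filesystem::path telemetry_path = img_path;
    telemetry_path.replace_extension("json");  // yields ".../0003.json"
    std::cout << telemetry_path << '\n';
    return 0;
}
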
3 changes: 2 additions & 1 deletion include/cv/aggregator.hpp
@@ -17,7 +17,8 @@
struct CVResults {
std::vector<DetectedTarget> detected_targets;
// mapping from bottle -> index into detected_targets
std::unordered_map<BottleDropIndex, size_t> matches;
// (nullopt if we don't have a match yet)
std::unordered_map<BottleDropIndex, std::optional<size_t>> matches;
};


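A short consumer-side sketch of the new optional mapping; it assumes the header above is reachable as "cv/aggregator.hpp", and the function itself is illustrative rather than part of the codebase.

#include <cstdio>

#include "cv/aggregator.hpp"

// Iterate the bottle -> target matches, skipping bottles with no match yet.
void printMatches(const CVResults& results) {
    for (const auto& [bottle, maybe_idx] : results.matches) {
        if (!maybe_idx.has_value()) {
            std::printf("bottle %d: no match yet\n", static_cast<int>(bottle));
            continue;
        }
        const auto& target = results.detected_targets.at(maybe_idx.value());
        std::printf("bottle %d: matched target %zu (distance %f)\n",
                    static_cast<int>(bottle), maybe_idx.value(), target.match_distance);
    }
}
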
4 changes: 3 additions & 1 deletion include/ticks/mission_prep.hpp
@@ -4,6 +4,8 @@
#include <memory>
#include <chrono>
#include <string>
#include <utility>
#include <vector>

#include "ticks/tick.hpp"
#include "cv/pipeline.hpp"
@@ -25,7 +27,7 @@ class MissionPrepTick : public Tick {

Tick* tick() override;
private:
std::vector<std::pair<cv::Mat, BottleDropIndex>>
std::vector<std::pair<cv::Mat, BottleDropIndex>>
generateReferenceImages(std::array<Bottle, NUM_AIRDROP_BOTTLES> competitionObjectives);

std::string getNotStolenRoute(const Bottle& target);
43 changes: 37 additions & 6 deletions src/camera/mock.cpp
@@ -7,6 +7,7 @@
#include <filesystem>

#include <loguru.hpp>
#include "nlohmann/json.hpp"

#include "network/mavlink.hpp"
#include "utilities/locks.hpp"
@@ -21,7 +22,14 @@ MockCamera::MockCamera(CameraConfig config) : CameraInterface(config) {
cv::Mat img = cv::imread(dir_entry.path().string());
// if the image is read
if (img.data != NULL) {
this->mock_images.push_back(img);
std::optional<ImageTelemetry> telemetry =
this->getTelemetryFromJsonFile(dir_entry.path());

ImageData img_data(
img,
0,
telemetry);
this->mock_images.push_back(img_data);
}
});
}
@@ -93,18 +101,41 @@ std::optional<ImageData> MockCamera::takePicture(const std::chrono::milliseconds
std::shared_ptr<MavlinkClient> mavlinkClient) {
int random_idx = randomInt(0, this->mock_images.size()-1);

std::optional<ImageTelemetry> telemetry = queryMavlinkImageTelemetry(mavlinkClient);

cv::Mat newImage = this->mock_images.at(random_idx);
ImageData img_data = this->mock_images.at(random_idx);
uint64_t timestamp = getUnixTime_s().count();

// if we can't find a corresponding telemetry json, just query mavlink
if (!img_data.TELEMETRY.has_value()) {
img_data.TELEMETRY = queryMavlinkImageTelemetry(mavlinkClient);
}

ImageData imageData {
.DATA = newImage,
.DATA = img_data.DATA,
.TIMESTAMP = timestamp,
.TELEMETRY = telemetry,
.TELEMETRY = img_data.TELEMETRY,
};

return imageData;
}

void MockCamera::startStreaming() {}

std::optional<ImageTelemetry> MockCamera::getTelemetryFromJsonFile(std::filesystem::path img_path) {
img_path.replace_extension("json");
std::ifstream telemetry_stream(img_path);
if (!telemetry_stream.is_open()) {
// no corresponding telemetry json found
return {};
}
nlohmann::json json = nlohmann::json::parse(telemetry_stream, nullptr, true, true);
return ImageTelemetry {
.latitude_deg = json["latitude_deg"],
.longitude_deg = json["longitude_deg"],
.altitude_agl_m = json["altitude_agl_m"],
.airspeed_m_s = json["airspeed_m_s"],
.heading_deg = json["heading_deg"],
.yaw_deg = json["yaw_deg"],
.pitch_deg = json["pitch_deg"],
.roll_deg = json["roll_deg"],
};
}
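
For reference, a sketch of the telemetry sidecar a mock image could ship with; the key names mirror what getTelemetryFromJsonFile() reads above, while the file name and numeric values are made-up samples.

#include <fstream>

#include "nlohmann/json.hpp"

int main() {
    nlohmann::json telemetry = {
        {"latitude_deg", 32.8812},
        {"longitude_deg", -117.2344},
        {"altitude_agl_m", 75.0},
        {"airspeed_m_s", 20.0},
        {"heading_deg", 270.0},
        {"yaw_deg", 270.0},
        {"pitch_deg", 0.0},
        {"roll_deg", 0.0},
    };
    // Saved next to 0003.jpg, this is the file MockCamera would pick up as 0003.json
    std::ofstream out("0003.json");
    out << telemetry.dump(2);
    return 0;
}
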
23 changes: 13 additions & 10 deletions src/cv/aggregator.cpp
@@ -7,11 +7,12 @@
CVAggregator::CVAggregator(Pipeline&& p): pipeline{p} {
this->num_worker_threads = 0;
this->results = std::make_shared<CVResults>();
this->results->matches[BottleDropIndex::A] = -1;
this->results->matches[BottleDropIndex::B] = -1;
this->results->matches[BottleDropIndex::C] = -1;
this->results->matches[BottleDropIndex::D] = -1;
this->results->matches[BottleDropIndex::E] = -1;
// set each bottle to be initially unmatched
this->results->matches[BottleDropIndex::A] = {};
this->results->matches[BottleDropIndex::B] = {};
this->results->matches[BottleDropIndex::C] = {};
this->results->matches[BottleDropIndex::D] = {};
this->results->matches[BottleDropIndex::E] = {};
}

CVAggregator::~CVAggregator() {}
@@ -50,21 +51,23 @@ void CVAggregator::worker(ImageData image, int thread_num) {
// newly inserted target, once we insert it after the if/else
size_t detected_target_index = this->results->detected_targets.size();

size_t curr_match_idx = this->results->matches[curr_target.likely_bottle];
if (curr_match_idx < 0) {
std::optional<size_t> curr_match_idx =
this->results->matches[curr_target.likely_bottle];
if (!curr_match_idx.has_value()) {
LOG_F(INFO, "Made first match between target %ld and bottle %d",
detected_target_index, curr_target.likely_bottle);

this->results->matches[curr_target.likely_bottle] = detected_target_index;
} else {
auto curr_match = this->results->detected_targets[curr_match_idx];
auto curr_match = this->results->detected_targets[curr_match_idx.value()];

if (curr_match.match_distance > curr_target.match_distance) {
LOG_F(INFO,
"Swapping match on bottle %d from target %ld -> %ld (distance %f -> %f)",
static_cast<int>(curr_match.likely_bottle),
this->results->matches[curr_match.likely_bottle],
detected_target_index, curr_match.match_distance,
this->results->matches[curr_match.likely_bottle].value_or(0),
detected_target_index,
curr_match.match_distance,
curr_target.match_distance);

this->results->matches[curr_match.likely_bottle] = detected_target_index;
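The swap logic above boils down to "smaller match_distance wins"; a minimal standalone restatement under that assumption (names are illustrative, not from the codebase):

#include <cstddef>
#include <optional>

// Decide whether a newly detected target should replace the current match for
// a bottle: it should when there is no match yet, or when the new distance is
// smaller (i.e. a better match).
bool shouldReplaceMatch(std::optional<std::size_t> current_match,
                        double current_distance, double new_distance) {
    if (!current_match.has_value()) {
        return true;
    }
    return current_distance > new_distance;
}
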
3 changes: 3 additions & 0 deletions src/cv/matching.cpp
@@ -83,6 +83,9 @@ Matching::Matching(std::array<Bottle, NUM_AIRDROP_BOTTLES>
* NOTE: Matching only occurs if loading model and ref. images was successful.
*/
MatchResult Matching::match(const CroppedTarget& croppedTarget) {
if (referenceFeatures.empty()) {
return MatchResult(BottleDropIndex::A, -std::numeric_limits<double>::infinity());
}
std::vector<torch::jit::IValue> input = toInput(croppedTarget.croppedImage);
torch::Tensor output = this->torch_module.forward(input).toTensor();
int minIndex = 0;
12 changes: 10 additions & 2 deletions src/cv/saliency.cpp
@@ -35,7 +35,7 @@ std::vector<CroppedTarget> Saliency::salience(cv::Mat image) {
tensor = tensor.toType(c10::kFloat).div(255);
// swap axis
tensor = Saliency::transpose(tensor, { (2), (0), (1) });

// eventually add device as member of Saliency
c10::Device device = torch::cuda::is_available() ? torch::kCUDA : torch::kCPU;
auto tensor_cuda = tensor.to(device);
@@ -60,7 +60,15 @@
std::vector<CroppedTarget> targets;
targets = extractTargets(listDetections, image);

LOG_F(INFO, "salience ok");
LOG_F(INFO, "saliency found %ld targets", targets.size());
for (auto const& target : targets) {
LOG_F(INFO, "\ttarget at bbox: (%d, %d, %d, %d). ismannikin: %d",
target.bbox.x1,
target.bbox.y1,
target.bbox.x2,
target.bbox.y2,
target.isMannikin);
}
return targets;
}

14 changes: 14 additions & 0 deletions src/ticks/cv_loiter.cpp
@@ -24,6 +24,20 @@ void CVLoiterTick::setStatus(Status status) {
Tick* CVLoiterTick::tick() {
// Tick is called if Search Zone coverage path is finished

// // print out current state of matching for debugging
// LockPtr<CVResults> cv_results = this->state->getCV()->getResults();
// for (const auto& match: cv_results.data->matches) {
// if (match.second.has_value()) {
// LOG_F(INFO, "Bottle %d is matched with target at lat: %f, lon: %f",
// match.first,
// cv_results.data->detected_targets.at(match.second.value()).coord.latitude(),
// cv_results.data->detected_targets.at(match.second.value()).coord.longitude()
// );
// } else {
// LOG_F(INFO, "Bottle %d is not matched with a target", match.first);
// }
// }

// Check status of the CV Results
if (status == Status::Validated) {
return new AirdropPrepTick(this->state);
6 changes: 2 additions & 4 deletions src/ticks/fly_search.cpp
@@ -57,12 +57,10 @@ Tick* FlySearchTick::tick() {
}

if (photo.has_value()) {
// TODO: debug why this is crashing and fix it

// Update the last photo time
// this->last_photo_time = getUnixTime_ms();
this->last_photo_time = getUnixTime_ms();
// Run the pipeline on the photo
// this->state->getCV()->runPipeline(photo.value());
this->state->getCV()->runPipeline(photo.value());
}
}
this->curr_mission_item = curr_waypoint;
34 changes: 25 additions & 9 deletions src/ticks/mission_prep.cpp
@@ -55,25 +55,41 @@ Tick* MissionPrepTick::tick() {
}


std::vector<std::pair<cv::Mat, BottleDropIndex>>
MissionPrepTick::generateReferenceImages(std::array<Bottle, NUM_AIRDROP_BOTTLES> competitionObjectives) {

std::vector<std::pair<cv::Mat, BottleDropIndex>> MissionPrepTick::generateReferenceImages
(std::array<Bottle, NUM_AIRDROP_BOTTLES> competitionObjectives) {
std::vector<std::pair<cv::Mat, BottleDropIndex>> ref_imgs;

int curr_bottle_idx = BottleDropIndex::A;
int curr_bottle_idx = BottleDropIndex::Undefined;
for (const auto& bottle : competitionObjectives) {
httplib::Client client(this->state->config.cv.not_stolen_addr, this->state->config.cv.not_stolen_port);
curr_bottle_idx++;

// don't generate reference images for the mannikin since the matching model
// doesn't match mannikins (they're handled by saliency)
if (bottle.ismannikin()) {
continue;
}

httplib::Client client(this->state->config.cv.not_stolen_addr,
this->state->config.cv.not_stolen_port);
auto res = client.Get(this->getNotStolenRoute(bottle));
// connection failed
if (!res) {
LOG_F(ERROR, "Failed to send request to not-stolen at %s:%u. Reason: %s",
this->state->config.cv.not_stolen_addr.c_str(),
this->state->config.cv.not_stolen_port,
httplib::to_string(res.error()).c_str());
return {};
}

if (res->status != 200) {
LOG_F(ERROR, "Got invalid response from not-stolen: %s", res->body.c_str());
continue;
}
std::vector<uint8_t> vectordata(res->body.begin(),res->body.end());
std::vector<uint8_t> vectordata(res->body.begin(), res->body.end());
cv::Mat data_mat(vectordata, true);
cv::Mat ref_img(cv::imdecode(data_mat,1)); //put 0 if you want greyscale
cv::Mat ref_img(cv::imdecode(data_mat, 1)); // put 0 if you want greyscale

ref_imgs.push_back({ref_img, (BottleDropIndex)curr_bottle_idx});
curr_bottle_idx++;
}
return ref_imgs;
}
@@ -85,7 +101,7 @@ std::string MissionPrepTick::getNotStolenRoute(const Bottle& target) {
std::string shape_type = ODLCShapeToString(target.shape());
std::string shape_color = ODLCColorToString(target.shapecolor());

return std::string("/generate?shape_type=") + shape_type +
return std::string("/generate?shape_type=") + shape_type +
std::string("&shape_color=") + shape_color +
std::string("&char_type=") + char_type +
std::string("&char_color=") + char_color;
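A self-contained sketch of the request/error-handling pattern introduced above for the not-stolen service; the host, port, query values, and the httplib include path are placeholders/assumptions rather than the project's real configuration (the loguru include mirrors the one used elsewhere in this diff).

#include <string>

#include <httplib.h>
#include <loguru.hpp>

// Fetch one reference image body, returning false on connection or HTTP errors.
bool fetchReferenceImage(std::string& out_body) {
    httplib::Client client("localhost", 5069);
    auto res = client.Get("/generate?shape_type=CIRCLE&shape_color=RED"
                          "&char_type=A&char_color=BLUE");
    if (!res) {  // connection failed
        LOG_F(ERROR, "Failed to reach not-stolen: %s",
              httplib::to_string(res.error()).c_str());
        return false;
    }
    if (res->status != 200) {  // server responded with an error
        LOG_F(ERROR, "Got invalid response from not-stolen: %s", res->body.c_str());
        return false;
    }
    out_body = res->body;  // raw encoded image bytes, ready for cv::imdecode
    return true;
}
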
4 changes: 2 additions & 2 deletions src/utilities/datatypes.cpp
@@ -126,7 +126,7 @@ std::string ODLCColorToString(const ODLCColor& color) {
case ODLCColor::Brown: return "BROWN";
case ODLCColor::Orange: return "ORANGE";
// maybe return optional nullopt here instead of defaulting to WHITE
// in case of an unknown color
// in case of an unknown color
default: return "WHITE";
}
}
}
