Do not export deleted senses (#643)
* Do not export deleted senses

* Exclude non-active senses during the export
* Minor code clean-up
jasonleenaylor authored Aug 25, 2020
1 parent 0681d5b commit efee3f8
Showing 2 changed files with 17 additions and 8 deletions.
8 changes: 7 additions & 1 deletion Backend.Tests/LiftControllerTests.cs
@@ -3,6 +3,7 @@
 using System.IO;
 using System.Linq;
 using System.Text;
+using System.Text.RegularExpressions;
 using BackendFramework.Controllers;
 using BackendFramework.Helper;
 using BackendFramework.Interfaces;
@@ -179,7 +180,9 @@ public void TestExportDeleted()
             _projServ.Create(proj);

             var word = RandomWord(proj.Id);
+            var secondWord = RandomWord(proj.Id);
             var createdWord = _wordrepo.Create(word).Result;
+            _wordrepo.Create(secondWord);

             word.Id = "";
             word.Vernacular = "updated";
@@ -192,8 +195,10 @@ public void TestExportDeleted()
             var exportPath = Path.Combine(combinePath, proj.Id, "Export", "LiftExport",
                 Path.Combine("Lift", "NewLiftFile.lift"));
             var text = File.ReadAllText(exportPath, Encoding.UTF8);
+            // Make sure we exported 2 live and one dead entry
+            Assert.That(Regex.Matches(text, "<entry").Count, Is.EqualTo(3));
             // There is only one deleted word
-            Assert.AreEqual(text.IndexOf("dateDeleted"), text.LastIndexOf("dateDeleted"));
+            Assert.That(text.IndexOf("dateDeleted"), Is.EqualTo(text.LastIndexOf("dateDeleted")));
         }

         [Test]
@@ -256,6 +261,7 @@ public void TestRoundtrip()
                 fstream.Close();

                 var allWords = _wordrepo.GetAllWords(proj.Id);
+                Assert.AreEqual(allWords.Result.Count, dataSet.Value.NumOfWords);
                 // Export
                 var exportedFilePath = _liftController.CreateLiftExport(proj.Id);
                 var exportedDirectory = Path.GetDirectoryName(exportedFilePath);
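Note on the numbers being asserted: the test now creates two words and then updates the first, so the export is expected to contain the two live words plus the superseded original, i.e. three <entry> elements, exactly one of which carries a dateDeleted attribute. A minimal, self-contained illustration of that assertion pattern (NUnit plus System.Text.RegularExpressions, matching the imports above, run against a literal stand-in for the exported LIFT text; the class and test names are made up for the sketch):

using System.Text.RegularExpressions;
using NUnit.Framework;

[TestFixture]
public class LiftExportAssertionSketch
{
    [Test]
    public void ThreeEntriesExactlyOneDeleted()
    {
        // Stand-in for the LIFT text read back from NewLiftFile.lift.
        const string text =
            "<entry id=\"a\"/><entry id=\"b\"/><entry id=\"c\" dateDeleted=\"2020-08-25T00:00:00Z\"/>";

        // Two live entries and one dead entry were exported.
        Assert.That(Regex.Matches(text, "<entry").Count, Is.EqualTo(3));

        // dateDeleted appears exactly once: its first and last occurrences coincide.
        Assert.That(text.IndexOf("dateDeleted"), Is.EqualTo(text.LastIndexOf("dateDeleted")));
    }
}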
17 changes: 10 additions & 7 deletions Backend/Services/LiftApiServices.cs
@@ -143,8 +143,10 @@ public string LiftExport(string projectId)
             var frontier = _repo.GetFrontier(projectId).Result;
             var activeWords = frontier.Where(x => x.Senses.Any(s => s.Accessibility == State.Active)).ToList();

-            // TODO: this is wrong, deleted is a subset of active, are not exclusive
-            var deletedWords = allWords.Where(x => activeWords.Contains(x)).ToList();
+            // All words in the frontier with any senses are considered current. The Combine does not import
+            // senseless entries, and the interface is supposed to prevent creating them. So the words found in
+            // allWords, but not in activeWords, are exported as 'deleted'.
+            var deletedWords = allWords.Where(x => !activeWords.Contains(x)).ToList();
             foreach (var wordEntry in activeWords)
             {
                 var entry = new LexEntry();
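For context on the corrected filter above: a frontier word counts as active when at least one of its senses is still Active, and every word known to the project that is not in that set is written out as a deleted entry. A minimal sketch of that partition, assuming the repository's Word, Sense, and State models are in scope (the helper class, its name, and its signature are illustrative, not part of this commit):

using System.Collections.Generic;
using System.Linq;

public static class ExportPartitionSketch
{
    // Hypothetical helper, shown only to illustrate the filtering above.
    public static (List<Word> Active, List<Word> Deleted) Split(
        List<Word> allWords, List<Word> frontier)
    {
        // A frontier word is "current" if at least one sense is still Active.
        var active = frontier
            .Where(w => w.Senses.Any(s => s.Accessibility == State.Active))
            .ToList();

        // Everything else in allWords is exported as a deleted entry (dateDeleted set).
        var deleted = allWords.Where(w => !active.Contains(w)).ToList();
        return (active, deleted);
    }
}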
@@ -254,11 +256,12 @@ private void AddVern(LexEntry entry, Word wordEntry, string projectId)
         /// <summary> Adds each sense of a word to be written out to lift </summary>
         private void AddSenses(LexEntry entry, Word wordEntry)
         {
-            for (var i = 0; i < wordEntry.Senses.Count; i++)
+            var activeSenses = wordEntry.Senses.Where(s => s.Accessibility == State.Active).ToList();
+            foreach (var currentSense in activeSenses)
             {
                 // Merge in senses
                 var dict = new Dictionary<string, string>();
-                foreach (var gloss in wordEntry.Senses[i].Glosses)
+                foreach (var gloss in currentSense.Glosses)
                 {
                     if (dict.ContainsKey(gloss.Language))
                     {
@@ -277,12 +280,12 @@ private void AddSenses(LexEntry entry, Word wordEntry)
                 entry.Senses.Add(lexSense);

                 // Merge in semantic domains
-                foreach (var semdom in wordEntry.Senses[i].SemanticDomains)
+                foreach (var semDom in currentSense.SemanticDomains)
                 {
                     var orc = new OptionRefCollection();
-                    orc.Add(semdom.Id + " " + semdom.Name);
+                    orc.Add(semDom.Id + " " + semDom.Name);

-                    entry.Senses[i].Properties.Add(
+                    lexSense.Properties.Add(
                         new KeyValuePair<string, IPalasoDataObjectProperty>("semantic-domain-ddp4", orc));
                 }
             }
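The sense-level change follows the same pattern: only senses whose Accessibility is Active produce a LexSense, and because the loop no longer indexes wordEntry.Senses, per-sense properties such as semantic domains have to hang off the LexSense object that was just built rather than off entry.Senses[i] (the two indices stop lining up once inactive senses are skipped). A condensed, illustrative fragment of the corrected loop, reusing the Word/Sense/State models and the SIL/Palaso LIFT types (LexEntry, LexSense, OptionRefCollection, IPalasoDataObjectProperty) already referenced by LiftApiServices.cs; the method name is made up and the gloss handling is elided:

private void AddActiveSensesSketch(LexEntry entry, Word wordEntry)
{
    foreach (var currentSense in wordEntry.Senses.Where(s => s.Accessibility == State.Active))
    {
        var lexSense = new LexSense();
        // ... glosses are merged into lexSense here, as in the unchanged lines of AddSenses ...
        entry.Senses.Add(lexSense);

        // Merge in semantic domains.
        foreach (var semDom in currentSense.SemanticDomains)
        {
            var orc = new OptionRefCollection();
            orc.Add(semDom.Id + " " + semDom.Name);

            // Attach to the LexSense just created, not to entry.Senses[i]:
            // after skipping inactive senses, i would no longer match.
            lexSense.Properties.Add(
                new KeyValuePair<string, IPalasoDataObjectProperty>("semantic-domain-ddp4", orc));
        }
    }
}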
