diff --git a/.github/workflows/maven.yml b/.github/workflows/maven.yml
index c7fc132c4..e38e20acc 100644
--- a/.github/workflows/maven.yml
+++ b/.github/workflows/maven.yml
@@ -45,6 +45,6 @@ jobs:
          distribution: temurin
          java-version: ${{ matrix.java }}
      - name: Build with Maven
-        run: mvn -V clean test install --no-transfer-progress -Pjacoco
+        run: mvn -V clean test install --no-transfer-progress -Pjacoco -Pci
      - name: Jacoco
        run: mvn jacoco:report
diff --git a/opennlp-tools/pom.xml b/opennlp-tools/pom.xml
index 4453b3964..8b82760ff 100644
--- a/opennlp-tools/pom.xml
+++ b/opennlp-tools/pom.xml
@@ -57,12 +57,6 @@
      <scope>test</scope>
-    <dependency>
-      <groupId>org.slf4j</groupId>
-      <artifactId>slf4j-simple</artifactId>
-      <scope>test</scope>
-    </dependency>
-
      <groupId>com.ginsberg</groupId>
@@ -71,6 +65,13 @@
      <scope>test</scope>
+    <dependency>
+      <groupId>io.github.hakky54</groupId>
+      <artifactId>logcaptor</artifactId>
+      <version>${logcaptor.version}</version>
+      <scope>test</scope>
+    </dependency>
+
@@ -121,7 +122,7 @@
        <groupId>org.apache.maven.plugins</groupId>
        <artifactId>maven-surefire-plugin</artifactId>
-          <argLine>-Xmx2048m -Dorg.slf4j.simpleLogger.defaultLogLevel=off -javaagent:${settings.localRepository}/com/ginsberg/junit5-system-exit/${junit5-system-exit.version}/junit5-system-exit-${junit5-system-exit.version}.jar</argLine>
+          <argLine>-Xmx2048m -DOPENNLP_DOWNLOAD_HOME=${opennlp.download.home} -javaagent:${settings.localRepository}/com/ginsberg/junit5-system-exit/${junit5-system-exit.version}/junit5-system-exit-${junit5-system-exit.version}.jar</argLine>
          <forkCount>${opennlp.forkCount}</forkCount>
          <reuseForks>false</reuseForks>
@@ -131,10 +132,29 @@
+
+      <plugin>
+        <groupId>org.apache.maven.plugins</groupId>
+        <artifactId>maven-failsafe-plugin</artifactId>
+        <version>${maven.failsafe.plugin}</version>
+        <configuration>
+          <argLine>-DOPENNLP_DOWNLOAD_HOME=${opennlp.download.home}</argLine>
+        </configuration>
+      </plugin>
+  <properties>
+    <opennlp.download.home>${user.home}</opennlp.download.home>
+  </properties>
+
+    <profile>
+      <id>ci</id>
+      <properties>
+        <opennlp.download.home>${project.build.directory}</opennlp.download.home>
+      </properties>
+    </profile>
      <id>jmh</id>
diff --git a/opennlp-tools/src/main/java/opennlp/tools/util/DownloadUtil.java b/opennlp-tools/src/main/java/opennlp/tools/util/DownloadUtil.java
index 76fb7bc63..11b328dae 100644
--- a/opennlp-tools/src/main/java/opennlp/tools/util/DownloadUtil.java
+++ b/opennlp-tools/src/main/java/opennlp/tools/util/DownloadUtil.java
@@ -32,6 +32,7 @@
import java.security.MessageDigest;
import java.security.NoSuchAlgorithmException;
import java.util.ArrayList;
+import java.util.Collections;
import java.util.Formatter;
import java.util.HashMap;
import java.util.List;
@@ -72,18 +73,12 @@ public enum ModelType {
}
}
- private static final String BASE_URL = "https://dlcdn.apache.org/opennlp/";
- private static final String MODELS_UD_MODELS_1_2 = "models/ud-models-1.2/";
+ private static final String BASE_URL =
+ System.getProperty("OPENNLP_DOWNLOAD_BASE_URL", "https://dlcdn.apache.org/opennlp/");
+ private static final String MODEL_URI_PATH =
+ System.getProperty("OPENNLP_DOWNLOAD_MODEL_PATH", "models/ud-models-1.2/");
- public static final Map<String, Map<ModelType, String>> available_models;
-
- static {
- try {
- available_models = new DownloadParser(new URL(BASE_URL + MODELS_UD_MODELS_1_2)).getAvailableModels();
- } catch (MalformedURLException e) {
- throw new RuntimeException(e);
- }
- }
+ private static Map<String, Map<ModelType, String>> availableModels;
/**
* Triggers a download for the specified {@link DownloadUtil.ModelType}.
@@ -98,14 +93,14 @@ public enum ModelType {
public static <T extends BaseModel> T downloadModel(String language, ModelType modelType,
Class<T> type) throws IOException {
- if (available_models.containsKey(language)) {
- final String url = (available_models.get(language).get(modelType));
+ if (getAvailableModels().containsKey(language)) {
+ final String url = (getAvailableModels().get(language).get(modelType));
if (url != null) {
return downloadModel(new URL(url), type);
}
}
- throw new IOException("Invalid model.");
+ throw new IOException("There is no model available: " + language + " " + modelType.name);
}
/**
@@ -124,9 +119,15 @@ public static T downloadModel(String language, ModelType m
*/
public static <T extends BaseModel> T downloadModel(URL url, Class<T> type) throws IOException {
- final Path homeDirectory = Paths.get(System.getProperty("user.home") + "/.opennlp/");
+ final Path homeDirectory = Paths.get(System.getProperty("OPENNLP_DOWNLOAD_HOME",
+ System.getProperty("user.home"))).resolve(".opennlp");
+
if (!Files.isDirectory(homeDirectory)) {
- homeDirectory.toFile().mkdir();
+ try {
+ Files.createDirectories(homeDirectory);
+ } catch (IOException e) {
+ throw new RuntimeException(e);
+ }
}
final String filename = url.toString().substring(url.toString().lastIndexOf("/") + 1);
@@ -141,8 +142,10 @@ public static T downloadModel(URL url, Class type) thro
validateModel(new URL(url + ".sha512"), localFile);
-
logger.debug("Download complete.");
+ } else {
+ System.out.println("Model file already exists. Skipping download.");
+ logger.debug("Model file '{}' already exists. Skipping download.", filename);
}
try {
@@ -152,6 +155,17 @@ public static T downloadModel(URL url, Class type) thro
}
}
+ public static Map<String, Map<ModelType, String>> getAvailableModels() {
+ if (availableModels == null) {
+ try {
+ availableModels = new DownloadParser(new URL(BASE_URL + MODEL_URI_PATH)).getAvailableModels();
+ } catch (MalformedURLException e) {
+ throw new RuntimeException(e);
+ }
+ }
+ return Collections.unmodifiableMap(availableModels);
+ }
+
/**
* Validates the downloaded model.
*
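
For readers of this patch, a minimal usage sketch of the two new system properties follows; it is not part of the change set. The property names and the downloadModel(...) signature are taken from the diff above, while the mirror URL and the target/ cache directory are invented example values. A custom base URL would have to expose the same models/ud-models-1.2/ index layout as the Apache CDN.

// Illustrative sketch only; example values, not an official OpenNLP endpoint.
import java.io.IOException;
import java.nio.file.Paths;

import opennlp.tools.sentdetect.SentenceModel;
import opennlp.tools.util.DownloadUtil;

public class DownloadConfigExample {

  public static void main(String[] args) throws IOException {
    // Must be set before DownloadUtil is first used, because BASE_URL is read when the class loads.
    System.setProperty("OPENNLP_DOWNLOAD_BASE_URL", "https://my-mirror.example.org/opennlp/");
    // Read on every downloadModel(...) call, so it can equally be passed as -DOPENNLP_DOWNLOAD_HOME=...
    // (which is exactly what the surefire/failsafe argLine in the POM changes does).
    System.setProperty("OPENNLP_DOWNLOAD_HOME", Paths.get("target").toAbsolutePath().toString());

    // Downloads the German sentence model into <OPENNLP_DOWNLOAD_HOME>/.opennlp, or reuses it if present.
    SentenceModel model = DownloadUtil.downloadModel(
        "de", DownloadUtil.ModelType.SENTENCE_DETECTOR, SentenceModel.class);
    System.out.println("Loaded '" + model.getLanguage() + "' sentence model.");
  }
}
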
diff --git a/opennlp-tools/src/test/java/opennlp/tools/util/AbstractDownloadUtilTest.java b/opennlp-tools/src/test/java/opennlp/tools/util/AbstractDownloadUtilTest.java
new file mode 100644
index 000000000..9fdde8b61
--- /dev/null
+++ b/opennlp-tools/src/test/java/opennlp/tools/util/AbstractDownloadUtilTest.java
@@ -0,0 +1,79 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package opennlp.tools.util;
+
+import java.io.IOException;
+import java.net.InetSocketAddress;
+import java.net.Socket;
+import java.nio.file.DirectoryStream;
+import java.nio.file.Files;
+import java.nio.file.Path;
+import java.nio.file.Paths;
+
+import org.junit.jupiter.api.BeforeAll;
+
+import opennlp.tools.EnabledWhenCDNAvailable;
+
+import static org.junit.jupiter.api.Assertions.fail;
+
+@EnabledWhenCDNAvailable(hostname = "dlcdn.apache.org")
+public abstract class AbstractDownloadUtilTest {
+
+ private static final String APACHE_CDN = "dlcdn.apache.org";
+
+ @BeforeAll
+ public static void cleanupWhenOnline() {
+ boolean isOnline;
+ try (Socket socket = new Socket()) {
+ socket.connect(new InetSocketAddress(APACHE_CDN, 80), EnabledWhenCDNAvailable.TIMEOUT_MS);
+ isOnline = true;
+ } catch (IOException e) {
+ // Unreachable, unresolvable or timeout
+ isOnline = false;
+ }
+ // If the CDN is available -> clean up in preparation for the actual tests
+ if (isOnline) {
+ wipeExistingModelFiles("-tokens-");
+ wipeExistingModelFiles("-sentence-");
+ wipeExistingModelFiles("-pos-");
+ wipeExistingModelFiles("-lemmas-");
+ }
+ }
+
+
+ /*
+ * Helper method that wipes out model files if they exist in the test execution environment.
+ * Those model files are wiped from a hidden '.opennlp' subdirectory.
+ *
+ * Thereby, a clean download can be guaranteed - if the CDN is available and the tests are executed.
+ */
+ private static void wipeExistingModelFiles(final String fragment) {
+ final Path dir = Paths.get(System.getProperty("OPENNLP_DOWNLOAD_HOME",
+ System.getProperty("user.home"))).resolve(".opennlp");
+ if (Files.exists(dir)) {
+ try (DirectoryStream<Path> stream = Files.newDirectoryStream(dir, "*opennlp-*" + fragment + "*")) {
+ for (Path modelFileToWipe : stream) {
+ Files.deleteIfExists(modelFileToWipe);
+ }
+ } catch (IOException e) {
+ fail(e.getLocalizedMessage());
+ }
+ }
+ }
+
+}
diff --git a/opennlp-tools/src/test/java/opennlp/tools/util/DownloadUtilDownloadTwiceTest.java b/opennlp-tools/src/test/java/opennlp/tools/util/DownloadUtilDownloadTwiceTest.java
new file mode 100644
index 000000000..5f328b009
--- /dev/null
+++ b/opennlp-tools/src/test/java/opennlp/tools/util/DownloadUtilDownloadTwiceTest.java
@@ -0,0 +1,97 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package opennlp.tools.util;
+
+import java.io.IOException;
+import java.util.List;
+
+import ch.qos.logback.classic.Level;
+import ch.qos.logback.classic.Logger;
+import ch.qos.logback.classic.LoggerContext;
+import nl.altindag.log.LogCaptor;
+import org.junit.jupiter.api.AfterAll;
+import org.junit.jupiter.api.BeforeAll;
+import org.junit.jupiter.api.Test;
+import org.slf4j.LoggerFactory;
+
+import opennlp.tools.EnabledWhenCDNAvailable;
+import opennlp.tools.sentdetect.SentenceModel;
+
+import static org.junit.jupiter.api.Assertions.assertEquals;
+
+@EnabledWhenCDNAvailable(hostname = "dlcdn.apache.org")
+public class DownloadUtilDownloadTwiceTest extends AbstractDownloadUtilTest {
+
+ /*
+ * Programmatically raise the log level to DEBUG so that the log messages which
+ * confirm that no duplicate download happens are visible.
+ */
+ @BeforeAll
+ public static void prepare() {
+ LoggerContext context = (LoggerContext) LoggerFactory.getILoggerFactory();
+ Logger logger = context.getLogger("opennlp");
+ logger.setLevel(Level.DEBUG);
+ }
+
+ /*
+ * Programmatically restore the default log level (= OFF) after the tests.
+ */
+ @AfterAll
+ public static void cleanup() {
+ LoggerContext context = (LoggerContext) LoggerFactory.getILoggerFactory();
+ Logger logger = context.getLogger("opennlp");
+ logger.setLevel(Level.OFF);
+ }
+
+ @Test
+ public void testDownloadModelTwice() throws IOException {
+ try (LogCaptor logCaptor = LogCaptor.forClass(DownloadUtil.class)) {
+
+ DownloadUtil.downloadModel("de",
+ DownloadUtil.ModelType.SENTENCE_DETECTOR, SentenceModel.class);
+
+ assertEquals(2, logCaptor.getDebugLogs().size());
+ checkDebugLogsContainMessageFragment(logCaptor.getDebugLogs(), "Download complete.");
+ logCaptor.clearLogs();
+
+ // try to download again
+ DownloadUtil.downloadModel("de",
+ DownloadUtil.ModelType.SENTENCE_DETECTOR, SentenceModel.class);
+ assertEquals(1, logCaptor.getDebugLogs().size());
+ checkDebugLogsContainMessageFragment(logCaptor.getDebugLogs(), "already exists. Skipping download.");
+ logCaptor.clearLogs();
+
+ DownloadUtil.downloadModel("de",
+ DownloadUtil.ModelType.SENTENCE_DETECTOR, SentenceModel.class);
+ assertEquals(1, logCaptor.getDebugLogs().size());
+ checkDebugLogsContainMessageFragment(logCaptor.getDebugLogs(), "already exists. Skipping download.");
+ logCaptor.clearLogs();
+
+ }
+ }
+
+ private void checkDebugLogsContainMessageFragment(List<String> debugLogs, String message) {
+ for (String log : debugLogs) {
+ if (log.contains(message)) {
+ return;
+ }
+ }
+ throw new AssertionError("Expected message fragment not found in logs: " + message);
+ }
+
+}
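
The Maven-level isolation introduced above (OPENNLP_DOWNLOAD_HOME passed via the surefire/failsafe argLine and overridden by the ci profile) can also be sketched purely at the JUnit level. The class below is a hypothetical illustration, not part of this patch: it redirects the download cache to a JUnit-managed temporary directory and restores the property afterwards. Like the tests above, it needs CDN access.

import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.Path;

import org.junit.jupiter.api.AfterAll;
import org.junit.jupiter.api.Assertions;
import org.junit.jupiter.api.BeforeAll;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.io.TempDir;

import opennlp.tools.sentdetect.SentenceModel;
import opennlp.tools.util.DownloadUtil;

// Hypothetical example class, for illustration only.
class TempDownloadHomeExampleTest {

  @TempDir
  static Path tempHome;

  static String previousHome;

  @BeforeAll
  static void redirectDownloadHome() {
    // DownloadUtil reads the property on every call, so no Maven argLine is strictly required.
    previousHome = System.setProperty("OPENNLP_DOWNLOAD_HOME", tempHome.toString());
  }

  @AfterAll
  static void restoreDownloadHome() {
    if (previousHome == null) {
      System.clearProperty("OPENNLP_DOWNLOAD_HOME");
    } else {
      System.setProperty("OPENNLP_DOWNLOAD_HOME", previousHome);
    }
  }

  @Test
  void modelIsCachedUnderTemporaryHome() throws IOException {
    SentenceModel model = DownloadUtil.downloadModel(
        "de", DownloadUtil.ModelType.SENTENCE_DETECTOR, SentenceModel.class);
    Assertions.assertNotNull(model);
    // DownloadUtil resolves '.opennlp' below the configured download home.
    Assertions.assertTrue(Files.isDirectory(tempHome.resolve(".opennlp")));
  }
}
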
diff --git a/opennlp-tools/src/test/java/opennlp/tools/util/DownloadUtilTest.java b/opennlp-tools/src/test/java/opennlp/tools/util/DownloadUtilTest.java
index 6ab0aa4cb..b8a61f910 100644
--- a/opennlp-tools/src/test/java/opennlp/tools/util/DownloadUtilTest.java
+++ b/opennlp-tools/src/test/java/opennlp/tools/util/DownloadUtilTest.java
@@ -18,16 +18,9 @@
package opennlp.tools.util;
import java.io.IOException;
-import java.net.InetSocketAddress;
-import java.net.Socket;
import java.net.URL;
-import java.nio.file.DirectoryStream;
-import java.nio.file.FileSystems;
-import java.nio.file.Files;
-import java.nio.file.Path;
import java.util.stream.Stream;
-import org.junit.jupiter.api.BeforeAll;
import org.junit.jupiter.params.ParameterizedTest;
import org.junit.jupiter.params.provider.Arguments;
import org.junit.jupiter.params.provider.MethodSource;
@@ -42,58 +35,18 @@
import static org.junit.jupiter.api.Assertions.assertNotNull;
import static org.junit.jupiter.api.Assertions.assertThrows;
import static org.junit.jupiter.api.Assertions.assertTrue;
-import static org.junit.jupiter.api.Assertions.fail;
-public class DownloadUtilTest {
-
- private static final String APACHE_CDN = "dlcdn.apache.org";
-
- @BeforeAll
- public static void cleanupWhenOnline() {
- boolean isOnline;
- try (Socket socket = new Socket()) {
- socket.connect(new InetSocketAddress(APACHE_CDN, 80), EnabledWhenCDNAvailable.TIMEOUT_MS);
- isOnline = true;
- } catch (IOException e) {
- // Unreachable, unresolvable or timeout
- isOnline = false;
- }
- // If CDN is available -> go cleanup in preparation of the actual tests
- if (isOnline) {
- wipeExistingModelFiles("-tokens-");
- wipeExistingModelFiles("-sentence-");
- }
- }
-
- /*
- * Helper method that wipes out mode files if they exist on the text execution env.
- * Those model files are wiped from the user's home hidden '.opennlp' subdirectory.
- *
- * Thereby, a clean download can be guaranteed - ín CDN is available and test are executed.
- */
- private static void wipeExistingModelFiles(final String fragment) {
- final String openNLPHomeDir = System.getProperty("user.home") + "/.opennlp/";
- final Path dir = FileSystems.getDefault().getPath(openNLPHomeDir);
- if (Files.exists(dir)) {
- try (DirectoryStream<Path> stream = Files.newDirectoryStream(dir, "*opennlp-*" + fragment + "*")) {
- for (Path modelFileToWipe: stream) {
- Files.deleteIfExists(modelFileToWipe);
- }
- } catch (IOException e) {
- fail(e.getLocalizedMessage());
- }
- }
- }
+public class DownloadUtilTest extends AbstractDownloadUtilTest {
@ParameterizedTest(name = "Verify \"{0}\" sentence model")
@ValueSource(strings = {"en", "fr", "de", "it", "nl", "bg", "ca", "cs", "da", "el",
- "es", "et", "eu", "fi", "hr", "hy", "is", "ka", "kk", "ko",
- "lv", "no", "pl", "pt", "ro", "ru", "sk", "sl", "sr", "sv",
- "tr", "uk"})
+ "es", "et", "eu", "fi", "hr", "hy", "is", "ka", "kk", "ko",
+ "lv", "no", "pl", "pt", "ro", "ru", "sk", "sl", "sr", "sv",
+ "tr", "uk"})
@EnabledWhenCDNAvailable(hostname = "dlcdn.apache.org")
public void testDownloadModelByLanguage(String lang) throws IOException {
SentenceModel model = DownloadUtil.downloadModel(lang,
- DownloadUtil.ModelType.SENTENCE_DETECTOR, SentenceModel.class);
+ DownloadUtil.ModelType.SENTENCE_DETECTOR, SentenceModel.class);
assertNotNull(model);
assertEquals(lang, model.getLanguage());
assertTrue(model.isLoadedFromSerialized());
@@ -114,8 +67,8 @@ public void testDownloadModelByURL(String language, URL url) throws IOException
@ValueSource(strings = {" ", "\t", "\n"})
public void testDownloadModelInvalid(String input) {
assertThrows(IOException.class, () -> DownloadUtil.downloadModel(
- input, DownloadUtil.ModelType.SENTENCE_DETECTOR, SentenceModel.class),
- "Invalid model");
+ input, DownloadUtil.ModelType.SENTENCE_DETECTOR, SentenceModel.class),
+ "Invalid model");
}
private static final DownloadUtil.ModelType MT_TOKENIZER = DownloadUtil.ModelType.TOKENIZER;
@@ -123,38 +76,38 @@ public void testDownloadModelInvalid(String input) {
// Note: This needs to be public as JUnit 5 requires it like this.
public static Stream<Arguments> provideURLs() {
return Stream.of(
- Arguments.of("en", DownloadUtil.available_models.get("en").get(MT_TOKENIZER)),
- Arguments.of("fr", DownloadUtil.available_models.get("fr").get(MT_TOKENIZER)),
- Arguments.of("de", DownloadUtil.available_models.get("de").get(MT_TOKENIZER)),
- Arguments.of("it", DownloadUtil.available_models.get("it").get(MT_TOKENIZER)),
- Arguments.of("nl", DownloadUtil.available_models.get("nl").get(MT_TOKENIZER)),
- Arguments.of("bg", DownloadUtil.available_models.get("bg").get(MT_TOKENIZER)),
- Arguments.of("ca", DownloadUtil.available_models.get("ca").get(MT_TOKENIZER)),
- Arguments.of("cs", DownloadUtil.available_models.get("cs").get(MT_TOKENIZER)),
- Arguments.of("da", DownloadUtil.available_models.get("da").get(MT_TOKENIZER)),
- Arguments.of("el", DownloadUtil.available_models.get("el").get(MT_TOKENIZER)),
- Arguments.of("es", DownloadUtil.available_models.get("es").get(MT_TOKENIZER)),
- Arguments.of("et", DownloadUtil.available_models.get("et").get(MT_TOKENIZER)),
- Arguments.of("eu", DownloadUtil.available_models.get("eu").get(MT_TOKENIZER)),
- Arguments.of("fi", DownloadUtil.available_models.get("fi").get(MT_TOKENIZER)),
- Arguments.of("hr", DownloadUtil.available_models.get("hr").get(MT_TOKENIZER)),
- Arguments.of("hy", DownloadUtil.available_models.get("hy").get(MT_TOKENIZER)),
- Arguments.of("is", DownloadUtil.available_models.get("is").get(MT_TOKENIZER)),
- Arguments.of("ka", DownloadUtil.available_models.get("ka").get(MT_TOKENIZER)),
- Arguments.of("kk", DownloadUtil.available_models.get("kk").get(MT_TOKENIZER)),
- Arguments.of("ko", DownloadUtil.available_models.get("ko").get(MT_TOKENIZER)),
- Arguments.of("lv", DownloadUtil.available_models.get("lv").get(MT_TOKENIZER)),
- Arguments.of("no", DownloadUtil.available_models.get("no").get(MT_TOKENIZER)),
- Arguments.of("pl", DownloadUtil.available_models.get("pl").get(MT_TOKENIZER)),
- Arguments.of("pt", DownloadUtil.available_models.get("pt").get(MT_TOKENIZER)),
- Arguments.of("ro", DownloadUtil.available_models.get("ro").get(MT_TOKENIZER)),
- Arguments.of("ru", DownloadUtil.available_models.get("ru").get(MT_TOKENIZER)),
- Arguments.of("sk", DownloadUtil.available_models.get("sk").get(MT_TOKENIZER)),
- Arguments.of("sl", DownloadUtil.available_models.get("sl").get(MT_TOKENIZER)),
- Arguments.of("sr", DownloadUtil.available_models.get("sr").get(MT_TOKENIZER)),
- Arguments.of("sv", DownloadUtil.available_models.get("sv").get(MT_TOKENIZER)),
- Arguments.of("tr", DownloadUtil.available_models.get("tr").get(MT_TOKENIZER)),
- Arguments.of("uk", DownloadUtil.available_models.get("uk").get(MT_TOKENIZER))
+ Arguments.of("en", DownloadUtil.getAvailableModels().get("en").get(MT_TOKENIZER)),
+ Arguments.of("fr", DownloadUtil.getAvailableModels().get("fr").get(MT_TOKENIZER)),
+ Arguments.of("de", DownloadUtil.getAvailableModels().get("de").get(MT_TOKENIZER)),
+ Arguments.of("it", DownloadUtil.getAvailableModels().get("it").get(MT_TOKENIZER)),
+ Arguments.of("nl", DownloadUtil.getAvailableModels().get("nl").get(MT_TOKENIZER)),
+ Arguments.of("bg", DownloadUtil.getAvailableModels().get("bg").get(MT_TOKENIZER)),
+ Arguments.of("ca", DownloadUtil.getAvailableModels().get("ca").get(MT_TOKENIZER)),
+ Arguments.of("cs", DownloadUtil.getAvailableModels().get("cs").get(MT_TOKENIZER)),
+ Arguments.of("da", DownloadUtil.getAvailableModels().get("da").get(MT_TOKENIZER)),
+ Arguments.of("el", DownloadUtil.getAvailableModels().get("el").get(MT_TOKENIZER)),
+ Arguments.of("es", DownloadUtil.getAvailableModels().get("es").get(MT_TOKENIZER)),
+ Arguments.of("et", DownloadUtil.getAvailableModels().get("et").get(MT_TOKENIZER)),
+ Arguments.of("eu", DownloadUtil.getAvailableModels().get("eu").get(MT_TOKENIZER)),
+ Arguments.of("fi", DownloadUtil.getAvailableModels().get("fi").get(MT_TOKENIZER)),
+ Arguments.of("hr", DownloadUtil.getAvailableModels().get("hr").get(MT_TOKENIZER)),
+ Arguments.of("hy", DownloadUtil.getAvailableModels().get("hy").get(MT_TOKENIZER)),
+ Arguments.of("is", DownloadUtil.getAvailableModels().get("is").get(MT_TOKENIZER)),
+ Arguments.of("ka", DownloadUtil.getAvailableModels().get("ka").get(MT_TOKENIZER)),
+ Arguments.of("kk", DownloadUtil.getAvailableModels().get("kk").get(MT_TOKENIZER)),
+ Arguments.of("ko", DownloadUtil.getAvailableModels().get("ko").get(MT_TOKENIZER)),
+ Arguments.of("lv", DownloadUtil.getAvailableModels().get("lv").get(MT_TOKENIZER)),
+ Arguments.of("no", DownloadUtil.getAvailableModels().get("no").get(MT_TOKENIZER)),
+ Arguments.of("pl", DownloadUtil.getAvailableModels().get("pl").get(MT_TOKENIZER)),
+ Arguments.of("pt", DownloadUtil.getAvailableModels().get("pt").get(MT_TOKENIZER)),
+ Arguments.of("ro", DownloadUtil.getAvailableModels().get("ro").get(MT_TOKENIZER)),
+ Arguments.of("ru", DownloadUtil.getAvailableModels().get("ru").get(MT_TOKENIZER)),
+ Arguments.of("sk", DownloadUtil.getAvailableModels().get("sk").get(MT_TOKENIZER)),
+ Arguments.of("sl", DownloadUtil.getAvailableModels().get("sl").get(MT_TOKENIZER)),
+ Arguments.of("sr", DownloadUtil.getAvailableModels().get("sr").get(MT_TOKENIZER)),
+ Arguments.of("sv", DownloadUtil.getAvailableModels().get("sv").get(MT_TOKENIZER)),
+ Arguments.of("tr", DownloadUtil.getAvailableModels().get("tr").get(MT_TOKENIZER)),
+ Arguments.of("uk", DownloadUtil.getAvailableModels().get("uk").get(MT_TOKENIZER))
);
}
}
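
Since getAvailableModels() now exposes the whole language-to-model index, the hand-maintained provideURLs() list above could in principle be derived from it. The method below is a hedged alternative sketch, not part of this patch; it assumes the same imports and the MT_TOKENIZER constant of the test class, and it would cover every language that publishes a TOKENIZER model rather than only the 32 listed.

// Hypothetical replacement for the explicit Arguments list; uses only API visible in this diff.
public static Stream<Arguments> provideURLs() {
  return DownloadUtil.getAvailableModels().entrySet().stream()
      .filter(entry -> entry.getValue().containsKey(MT_TOKENIZER))
      .map(entry -> Arguments.of(entry.getKey(), entry.getValue().get(MT_TOKENIZER)));
}

As in the original list, the stored model URLs are plain strings; JUnit's implicit argument conversion turns them into java.net.URL when they are bound to the testDownloadModelByURL parameter.
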
diff --git a/opennlp-tools/src/test/resources/logback-test.xml b/opennlp-tools/src/test/resources/logback-test.xml
new file mode 100644
index 000000000..b3cbcf203
--- /dev/null
+++ b/opennlp-tools/src/test/resources/logback-test.xml
@@ -0,0 +1,36 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<!--
+  Licensed to the Apache Software Foundation (ASF) under one or more
+  contributor license agreements. See the NOTICE file distributed with
+  this work for additional information regarding copyright ownership.
+  The ASF licenses this file to You under the Apache License, Version 2.0
+  (the "License"); you may not use this file except in compliance with
+  the License. You may obtain a copy of the License at
+
+      http://www.apache.org/licenses/LICENSE-2.0
+
+  Unless required by applicable law or agreed to in writing, software
+  distributed under the License is distributed on an "AS IS" BASIS,
+  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+  See the License for the specific language governing permissions and
+  limitations under the License.
+-->
+<configuration>
+
+  <appender name="CONSOLE" class="ch.qos.logback.core.ConsoleAppender">
+    <encoder>
+      <pattern>%date{HH:mm:ss.SSS} [%thread] %-5level %class{36}.%method:%line - %msg%n</pattern>
+    </encoder>
+  </appender>
+
+  <root level="OFF">
+    <appender-ref ref="CONSOLE"/>
+  </root>
+
+</configuration>
\ No newline at end of file
diff --git a/pom.xml b/pom.xml
index f5a241add..6c8055a12 100644
--- a/pom.xml
+++ b/pom.xml
@@ -175,9 +175,10 @@
1.20.0
2.0.16
2.24.2
+    <logcaptor.version>2.10.0</logcaptor.version>
1.37
4.8.179
-
+
1.1.0
1.0C