[pull] main from elastic:main #646

Merged: 8 commits into main from elastic:main on Jan 3, 2025
@@ -31,6 +31,7 @@
import org.gradle.api.artifacts.Configuration;
import org.gradle.api.artifacts.dsl.DependencyHandler;
import org.gradle.api.artifacts.type.ArtifactTypeDefinition;
import org.gradle.api.file.FileCollection;
import org.gradle.api.plugins.JavaPluginExtension;
import org.gradle.api.provider.Provider;
import org.gradle.api.specs.Specs;
@@ -88,8 +89,8 @@ public void apply(Project project) {
Map<String, TaskProvider<?>> versionTasks = versionTasks(project, "destructiveDistroUpgradeTest", buildParams.getBwcVersions());
TaskProvider<Task> destructiveDistroTest = project.getTasks().register("destructiveDistroTest");

-        Configuration examplePlugin = configureExamplePlugin(project);

+        Configuration examplePluginConfiguration = configureExamplePlugin(project);
+        FileCollection examplePluginFileCollection = examplePluginConfiguration;
List<TaskProvider<Test>> windowsTestTasks = new ArrayList<>();
Map<ElasticsearchDistributionType, List<TaskProvider<Test>>> linuxTestTasks = new HashMap<>();

Expand All @@ -102,9 +103,9 @@ public void apply(Project project) {
t2 -> distribution.isDocker() == false || dockerSupport.get().getDockerAvailability().isAvailable()
);
addDistributionSysprop(t, DISTRIBUTION_SYSPROP, distribution::getFilepath);
-            addDistributionSysprop(t, EXAMPLE_PLUGIN_SYSPROP, () -> examplePlugin.getSingleFile().toString());
+            addDistributionSysprop(t, EXAMPLE_PLUGIN_SYSPROP, () -> examplePluginFileCollection.getSingleFile().toString());
t.exclude("**/PackageUpgradeTests.class");
-        }, distribution, examplePlugin.getDependencies());
+        }, distribution, examplePluginConfiguration.getDependencies());

if (distribution.getPlatform() == Platform.WINDOWS) {
windowsTestTasks.add(destructiveTask);
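A Gradle `Configuration` is itself a `FileCollection`, so the rename above splits the two roles the old `examplePlugin` variable played: dependency wiring stays on the `Configuration` (`getDependencies()`), while task actions capture only the resolvable file view. A minimal sketch of the pattern under that reading; the plugin and task names are illustrative, not from this PR:

```java
import org.gradle.api.Plugin;
import org.gradle.api.Project;
import org.gradle.api.artifacts.Configuration;
import org.gradle.api.file.FileCollection;

// Sketch: widen the Configuration to a FileCollection before capturing it in a
// task action, so the action holds only the lazily resolvable file view.
public class ExamplePluginPattern implements Plugin<Project> {
    @Override
    public void apply(Project project) {
        Configuration examplePluginConfiguration = project.getConfigurations().create("examplePlugin");
        FileCollection examplePluginFiles = examplePluginConfiguration;
        project.getTasks().register("printExamplePlugin", task -> task.doLast(t ->
            // Resolution happens here, at execution time, not at configuration time.
            System.out.println(examplePluginFiles.getSingleFile())));
    }
}
```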
6 changes: 6 additions & 0 deletions docs/changelog/119054.yaml
@@ -0,0 +1,6 @@
pr: 119054
summary: "[Security Solution] allows `kibana_system` user to manage .reindexed-v8-*\
\ Security Solution indices"
area: Authorization
type: enhancement
issues: []
5 changes: 5 additions & 0 deletions docs/changelog/119495.yaml
@@ -0,0 +1,5 @@
pr: 119495
summary: Add mapping for `event_name` for OTel logs
area: Data streams
type: enhancement
issues: []
7 changes: 5 additions & 2 deletions docs/reference/inference/service-elasticsearch.asciidoc
@@ -9,8 +9,11 @@ For the most up-to-date API details, refer to {api-es}/group/endpoint-inference[

Creates an {infer} endpoint to perform an {infer} task with the `elasticsearch` service.

-NOTE: If you use the ELSER or the E5 model through the `elasticsearch` service, the API request will automatically download and deploy the model if it isn't downloaded yet.

+[NOTE]
+====
+* Your {es} deployment contains <<default-enpoints,preconfigured ELSER and E5 {infer} endpoints>>; you only need to create the endpoints using the API if you want to customize the settings.
+* If you use the ELSER or the E5 model through the `elasticsearch` service, the API request will automatically download and deploy the model if it isn't downloaded yet.
+====

[discrete]
[[infer-service-elasticsearch-api-request]]
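A hedged sketch of creating such an endpoint with the low-level Java REST client; the endpoint name, model id, allocation settings, and cluster address are illustrative assumptions, not values from this PR:

```java
import org.apache.http.HttpHost;
import org.elasticsearch.client.Request;
import org.elasticsearch.client.Response;
import org.elasticsearch.client.RestClient;

// Creates an ELSER endpoint through the `elasticsearch` service. Per the note
// above, the request also downloads and deploys the model if it is missing.
public class CreateElasticsearchServiceEndpoint {
    public static void main(String[] args) throws Exception {
        try (RestClient client = RestClient.builder(new HttpHost("localhost", 9200, "http")).build()) {
            Request request = new Request("PUT", "/_inference/sparse_embedding/my-elser-endpoint");
            request.setJsonEntity("""
                {
                  "service": "elasticsearch",
                  "service_settings": {
                    "model_id": ".elser_model_2",
                    "num_allocations": 1,
                    "num_threads": 1
                  }
                }""");
            Response response = client.performRequest(request);
            System.out.println(response.getStatusLine());
        }
    }
}
```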
11 changes: 7 additions & 4 deletions docs/reference/inference/service-elser.asciidoc
@@ -10,14 +10,17 @@ For the most up-to-date API details, refer to {api-es}/group/endpoint-inference[
Creates an {infer} endpoint to perform an {infer} task with the `elser` service.
You can also deploy ELSER by using the <<infer-service-elasticsearch>>.

-NOTE: The API request will automatically download and deploy the ELSER model if
-it isn't already downloaded.
+[NOTE]
+====
+* Your {es} deployment contains <<default-enpoints,a preconfigured ELSER {infer} endpoint>>; you only need to create the endpoint using the API if you want to customize the settings.
+* The API request will automatically download and deploy the ELSER model if it isn't already downloaded.
+====

[WARNING]
.Deprecated in 8.16
====
-The elser service is deprecated and will be removed in a future release.
-Use the <<infer-service-elasticsearch>> instead, with model_id included in the service_settings.
+The `elser` service is deprecated and will be removed in a future release.
+Use the <<infer-service-elasticsearch>> instead, with `model_id` included in the `service_settings`.
====

[discrete]
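Since the note above says a preconfigured ELSER endpoint already ships with the deployment, it can be worth listing what exists before creating a custom endpoint. A sketch with the same hypothetical connection details:

```java
import org.apache.http.HttpHost;
import org.apache.http.util.EntityUtils;
import org.elasticsearch.client.Request;
import org.elasticsearch.client.Response;
import org.elasticsearch.client.RestClient;

// Prints every configured inference endpoint; the preconfigured ELSER endpoint
// should appear without any prior PUT request.
public class ListInferenceEndpoints {
    public static void main(String[] args) throws Exception {
        try (RestClient client = RestClient.builder(new HttpHost("localhost", 9200, "http")).build()) {
            Response response = client.performRequest(new Request("GET", "/_inference/_all"));
            System.out.println(EntityUtils.toString(response.getEntity()));
        }
    }
}
```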
12 changes: 6 additions & 6 deletions muted-tests.yml
@@ -241,9 +241,6 @@ tests:
- class: org.elasticsearch.xpack.test.rest.XPackRestIT
method: test {p0=ml/data_frame_analytics_cat_apis/Test cat data frame analytics all jobs with header}
issue: https://github.com/elastic/elasticsearch/issues/119332
-- class: org.elasticsearch.xpack.esql.qa.multi_node.EsqlSpecIT
-  method: test {lookup-join.MvJoinKeyOnTheDataNode ASYNC}
-  issue: https://github.com/elastic/elasticsearch/issues/119179
- class: org.elasticsearch.smoketest.SmokeTestMultiNodeClientYamlTestSuiteIT
issue: https://github.com/elastic/elasticsearch/issues/119191
- class: org.elasticsearch.xpack.logsdb.qa.LogsDbVersusLogsDbReindexedIntoStandardModeChallengeRestIT
@@ -252,12 +249,15 @@
- class: org.elasticsearch.xpack.logsdb.qa.LogsDbVersusLogsDbReindexedIntoStandardModeChallengeRestIT
method: testMatchAllQuery
issue: https://github.com/elastic/elasticsearch/issues/119432
- class: org.elasticsearch.xpack.esql.qa.multi_node.EsqlSpecIT
method: test {lookup-join.MvJoinKeyOnTheDataNode SYNC}
issue: https://github.com/elastic/elasticsearch/issues/119446
- class: org.elasticsearch.xpack.logsdb.qa.LogsDbVersusLogsDbReindexedIntoStandardModeChallengeRestIT
method: testTermsQuery
issue: https://github.com/elastic/elasticsearch/issues/119486
- class: org.elasticsearch.xpack.test.rest.XPackRestIT
method: test {p0=transform/transforms_start_stop/Test start/stop/start transform}
issue: https://github.com/elastic/elasticsearch/issues/119508
- class: org.elasticsearch.xpack.logsdb.qa.LogsDbVersusLogsDbReindexedIntoStandardModeChallengeRestIT
method: testEsqlSource
issue: https://github.com/elastic/elasticsearch/issues/119510

# Examples:
#
@@ -225,11 +225,19 @@ static RoleDescriptor kibanaSystem(String name) {
RoleDescriptor.IndicesPrivileges.builder().indices("logs-fleet_server*").privileges("read", "delete_index").build(),
// Legacy "Alerts as data" used in Security Solution.
// Kibana user creates these indices; reads / writes to them.
-            RoleDescriptor.IndicesPrivileges.builder().indices(ReservedRolesStore.ALERTS_LEGACY_INDEX).privileges("all").build(),
+            RoleDescriptor.IndicesPrivileges.builder()
+                .indices(ReservedRolesStore.ALERTS_LEGACY_INDEX, ReservedRolesStore.ALERTS_LEGACY_INDEX_REINDEXED_V8)
+                .privileges("all")
+                .build(),
// Used in Security Solution for value lists.
// Kibana user creates these indices; reads / writes to them.
RoleDescriptor.IndicesPrivileges.builder()
-                .indices(ReservedRolesStore.LISTS_INDEX, ReservedRolesStore.LISTS_ITEMS_INDEX)
+                .indices(
+                    ReservedRolesStore.LISTS_INDEX,
+                    ReservedRolesStore.LISTS_ITEMS_INDEX,
+                    ReservedRolesStore.LISTS_INDEX_REINDEXED_V8,
+                    ReservedRolesStore.LISTS_ITEMS_INDEX_REINDEXED_V8
+                )
.privileges("all")
.build(),
// "Alerts as data" internal backing indices used in Security Solution,
@@ -43,6 +43,7 @@
public class ReservedRolesStore implements BiConsumer<Set<String>, ActionListener<RoleRetrievalResult>> {
/** "Security Solutions" only legacy signals index */
public static final String ALERTS_LEGACY_INDEX = ".siem-signals*";
public static final String ALERTS_LEGACY_INDEX_REINDEXED_V8 = ".reindexed-v8-siem-signals*";

/** Alerts, Rules, Cases (RAC) index used by multiple solutions */
public static final String ALERTS_BACKING_INDEX = ".internal.alerts*";
@@ -60,9 +61,11 @@ public class ReservedRolesStore implements BiConsumer<Set<String>, ActionListene

/** "Security Solutions" only lists index for value lists for detections */
public static final String LISTS_INDEX = ".lists-*";
public static final String LISTS_INDEX_REINDEXED_V8 = ".reindexed-v8-lists-*";

/** "Security Solutions" only lists index for value list items for detections */
public static final String LISTS_ITEMS_INDEX = ".items-*";
public static final String LISTS_ITEMS_INDEX_REINDEXED_V8 = ".reindexed-v8-items-*";

/** Index pattern for Universal Profiling */
public static final String UNIVERSAL_PROFILING_ALIASES = "profiling-*";
@@ -829,7 +832,14 @@ private static RoleDescriptor buildViewerRoleDescriptor() {
.build(),
// Security
RoleDescriptor.IndicesPrivileges.builder()
-                .indices(ReservedRolesStore.ALERTS_LEGACY_INDEX, ReservedRolesStore.LISTS_INDEX, ReservedRolesStore.LISTS_ITEMS_INDEX)
+                .indices(
+                    ReservedRolesStore.ALERTS_LEGACY_INDEX,
+                    ReservedRolesStore.LISTS_INDEX,
+                    ReservedRolesStore.LISTS_ITEMS_INDEX,
+                    ReservedRolesStore.ALERTS_LEGACY_INDEX_REINDEXED_V8,
+                    ReservedRolesStore.LISTS_INDEX_REINDEXED_V8,
+                    ReservedRolesStore.LISTS_ITEMS_INDEX_REINDEXED_V8
+                )
.privileges("read", "view_index_metadata")
.build(),
// Alerts-as-data
@@ -880,7 +890,14 @@ private static RoleDescriptor buildEditorRoleDescriptor() {
.build(),
// Security
RoleDescriptor.IndicesPrivileges.builder()
-                .indices(ReservedRolesStore.ALERTS_LEGACY_INDEX, ReservedRolesStore.LISTS_INDEX, ReservedRolesStore.LISTS_ITEMS_INDEX)
+                .indices(
+                    ReservedRolesStore.ALERTS_LEGACY_INDEX,
+                    ReservedRolesStore.LISTS_INDEX,
+                    ReservedRolesStore.LISTS_ITEMS_INDEX,
+                    ReservedRolesStore.ALERTS_LEGACY_INDEX_REINDEXED_V8,
+                    ReservedRolesStore.LISTS_INDEX_REINDEXED_V8,
+                    ReservedRolesStore.LISTS_ITEMS_INDEX_REINDEXED_V8
+                )
.privileges("read", "view_index_metadata", "write", "maintenance")
.build(),
// Alerts-as-data
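The new `*_REINDEXED_V8` constants work because index privileges are granted on wildcard patterns, so a single grant covers every index produced by reindexing. A toy sketch of the matching idea using plain `java.util.regex`; {es} itself uses an automaton-based matcher, so this is only an illustration:

```java
import java.util.List;
import java.util.regex.Pattern;

// Shows that ".reindexed-v8-siem-signals*" covers reindexed signals indices
// while leaving the original ".siem-signals-*" names to the existing pattern.
public class WildcardPrivilegeSketch {
    static boolean matches(String pattern, String index) {
        // Quote the literal parts and expand each '*' into ".*".
        String regex = Pattern.quote(pattern).replace("*", "\\E.*\\Q");
        return Pattern.matches(regex, index);
    }

    public static void main(String[] args) {
        String pattern = ".reindexed-v8-siem-signals*";
        for (String index : List.of(".reindexed-v8-siem-signals-default-000001", ".siem-signals-default-000001")) {
            System.out.println(index + " -> " + matches(pattern, index)); // true, then false
        }
    }
}
```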
@@ -613,14 +613,17 @@ public void testKibanaSystemRole() {
".apm-custom-link",
".apm-source-map",
ReservedRolesStore.ALERTS_LEGACY_INDEX + randomAlphaOfLength(randomIntBetween(0, 13)),
ReservedRolesStore.ALERTS_LEGACY_INDEX_REINDEXED_V8 + randomAlphaOfLength(randomIntBetween(0, 13)),
ReservedRolesStore.ALERTS_BACKING_INDEX + randomAlphaOfLength(randomIntBetween(0, 13)),
ReservedRolesStore.ALERTS_BACKING_INDEX_REINDEXED + randomAlphaOfLength(randomIntBetween(0, 13)),
ReservedRolesStore.ALERTS_INDEX_ALIAS + randomAlphaOfLength(randomIntBetween(0, 13)),
ReservedRolesStore.PREVIEW_ALERTS_INDEX_ALIAS + randomAlphaOfLength(randomIntBetween(0, 13)),
ReservedRolesStore.PREVIEW_ALERTS_BACKING_INDEX + randomAlphaOfLength(randomIntBetween(0, 13)),
ReservedRolesStore.PREVIEW_ALERTS_BACKING_INDEX_REINDEXED + randomAlphaOfLength(randomIntBetween(0, 13)),
ReservedRolesStore.LISTS_INDEX + randomAlphaOfLength(randomIntBetween(0, 13)),
ReservedRolesStore.LISTS_INDEX_REINDEXED_V8 + randomAlphaOfLength(randomIntBetween(0, 13)),
ReservedRolesStore.LISTS_ITEMS_INDEX + randomAlphaOfLength(randomIntBetween(0, 13)),
ReservedRolesStore.LISTS_ITEMS_INDEX_REINDEXED_V8 + randomAlphaOfLength(randomIntBetween(0, 13)),
".slo-observability." + randomAlphaOfLength(randomIntBetween(0, 13))
).forEach(index -> assertAllIndicesAccessAllowed(kibanaRole, index));

@@ -419,6 +419,7 @@ FROM employees
| EVAL language_code = emp_no % 10
| LOOKUP JOIN languages_lookup_non_unique_key ON language_code
| SORT emp_no
| EVAL language_name = MV_SORT(language_name)
| KEEP emp_no, language_code, language_name
;

@@ -9,6 +9,7 @@

import org.elasticsearch.features.FeatureSpecification;
import org.elasticsearch.features.NodeFeature;
import org.elasticsearch.xpack.inference.mapper.SemanticInferenceMetadataFieldsMapper;
import org.elasticsearch.xpack.inference.mapper.SemanticTextFieldMapper;
import org.elasticsearch.xpack.inference.queries.SemanticQueryBuilder;
import org.elasticsearch.xpack.inference.rank.random.RandomRankRetrieverBuilder;
@@ -48,7 +49,8 @@ public Set<NodeFeature> getTestFeatures() {
SemanticTextFieldMapper.SEMANTIC_TEXT_ALWAYS_EMIT_INFERENCE_ID_FIX,
SEMANTIC_TEXT_HIGHLIGHTER,
SEMANTIC_MATCH_QUERY_REWRITE_INTERCEPTION_SUPPORTED,
-            SEMANTIC_SPARSE_VECTOR_QUERY_REWRITE_INTERCEPTION_SUPPORTED
+            SEMANTIC_SPARSE_VECTOR_QUERY_REWRITE_INTERCEPTION_SUPPORTED,
+            SemanticInferenceMetadataFieldsMapper.EXPLICIT_NULL_FIXES
);
}
}
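The `SemanticInferenceMetadataFieldsMapper.EXPLICIT_NULL_FIXES` constant registered above is a `NodeFeature` declared on the mapper class, which this diff does not show. A hedged sketch of what such a declaration typically looks like; the feature id string is a guess, not taken from the PR:

```java
import org.elasticsearch.features.NodeFeature;

// Sketch only: a test feature is a named NodeFeature constant that other code,
// like the getTestFeatures() override above, can register.
public class SemanticInferenceMetadataFieldsMapperSketch {
    public static final NodeFeature EXPLICIT_NULL_FIXES = new NodeFeature("semantic_text.explicit_null_fixes");
}
```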
@@ -39,6 +39,7 @@
import org.elasticsearch.inference.UnparsedModel;
import org.elasticsearch.rest.RestStatus;
import org.elasticsearch.tasks.Task;
import org.elasticsearch.xcontent.XContent;
import org.elasticsearch.xpack.core.inference.results.ChunkedInferenceError;
import org.elasticsearch.xpack.inference.mapper.SemanticTextField;
import org.elasticsearch.xpack.inference.mapper.SemanticTextFieldMapper;
@@ -50,6 +51,7 @@
import java.util.Collections;
import java.util.Comparator;
import java.util.HashMap;
import java.util.Iterator;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
@@ -67,6 +69,8 @@
*/
public class ShardBulkInferenceActionFilter implements MappedActionFilter {
protected static final int DEFAULT_BATCH_SIZE = 512;
private static final Object EXPLICIT_NULL = new Object();
private static final ChunkedInference EMPTY_CHUNKED_INFERENCE = new EmptyChunkedInference();

private final ClusterService clusterService;
private final InferenceServiceRegistry inferenceServiceRegistry;
@@ -393,11 +397,22 @@ private void applyInferenceResponses(BulkItemRequest item, FieldInferenceRespons
for (var entry : response.responses.entrySet()) {
var fieldName = entry.getKey();
var responses = entry.getValue();
-            var model = responses.get(0).model();
+            Model model = null;
+
+            InferenceFieldMetadata inferenceFieldMetadata = fieldInferenceMap.get(fieldName);
+            if (inferenceFieldMetadata == null) {
+                throw new IllegalStateException("No inference field metadata for field [" + fieldName + "]");
+            }
+
            // ensure that the order in the original field is consistent in case of multiple inputs
            Collections.sort(responses, Comparator.comparingInt(FieldInferenceResponse::inputOrder));
            Map<String, List<SemanticTextField.Chunk>> chunkMap = new LinkedHashMap<>();
            for (var resp : responses) {
+                // Get the first non-null model from the response list
+                if (model == null) {
+                    model = resp.model;
+                }
+
var lst = chunkMap.computeIfAbsent(resp.sourceField, k -> new ArrayList<>());
lst.addAll(
SemanticTextField.toSemanticTextFieldChunks(
@@ -409,21 +424,26 @@ private void applyInferenceResponses(BulkItemRequest item, FieldInferenceRespons
)
);
}

List<String> inputs = responses.stream()
.filter(r -> r.sourceField().equals(fieldName))
.map(r -> r.input)
.collect(Collectors.toList());

+            // The model can be null if we are only processing update requests that clear inference results. This is ok because we will
+            // merge in the field's existing model settings on the data node.
var result = new SemanticTextField(
useLegacyFormat,
fieldName,
useLegacyFormat ? inputs : null,
new SemanticTextField.InferenceResult(
-                    model.getInferenceEntityId(),
-                    new SemanticTextField.ModelSettings(model),
+                    inferenceFieldMetadata.getInferenceId(),
+                    model != null ? new SemanticTextField.ModelSettings(model) : null,
chunkMap
),
indexRequest.getContentType()
);

if (useLegacyFormat) {
SemanticTextUtils.insertValue(fieldName, newDocMap, result);
} else {
@@ -490,7 +510,8 @@ private Map<String, List<FieldInferenceRequest>> createFieldInferenceRequests(Bu
} else {
var inferenceMetadataFieldsValue = XContentMapValues.extractValue(
InferenceMetadataFieldsMapper.NAME + "." + field,
-                        docMap
+                        docMap,
+                        EXPLICIT_NULL
);
if (inferenceMetadataFieldsValue != null) {
// Inference has already been computed
@@ -500,9 +521,22 @@ private Map<String, List<FieldInferenceRequest>> createFieldInferenceRequests(Bu

int order = 0;
for (var sourceField : entry.getSourceFields()) {
-                // TODO: Detect when the field is provided with an explicit null value
-                var valueObj = XContentMapValues.extractValue(sourceField, docMap);
-                if (valueObj == null) {
+                var valueObj = XContentMapValues.extractValue(sourceField, docMap, EXPLICIT_NULL);
+                if (useLegacyFormat == false && isUpdateRequest && valueObj == EXPLICIT_NULL) {
+                    /**
+                     * It's an update request, and the source field is explicitly set to null,
+                     * so we need to propagate this information to the inference fields metadata
+                     * to overwrite any inference previously computed on the field.
+                     * This ensures that the field is treated as intentionally cleared,
+                     * preventing any unintended carryover of prior inference results.
+                     */
+                    var slot = ensureResponseAccumulatorSlot(itemIndex);
+                    slot.addOrUpdateResponse(
+                        new FieldInferenceResponse(field, sourceField, null, order++, 0, null, EMPTY_CHUNKED_INFERENCE)
+                    );
+                    continue;
+                }
+                if (valueObj == null || valueObj == EXPLICIT_NULL) {
if (isUpdateRequest && useLegacyFormat) {
addInferenceResponseFailure(
item.id(),
@@ -552,4 +586,11 @@ static IndexRequest getIndexRequestOrNull(DocWriteRequest<?> docWriteRequest) {
return null;
}
}

private static class EmptyChunkedInference implements ChunkedInference {
@Override
public Iterator<Chunk> chunksAsMatchedTextAndByteReference(XContent xcontent) {
return Collections.emptyIterator();
}
}
}
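The `EXPLICIT_NULL` sentinel introduced above is a reference-identity trick: a freshly allocated `Object` can never be `==` to any real document value, so it can mark a field that was explicitly set to `null`, as opposed to one that is simply absent. A self-contained sketch of the idea, mirroring how the filter passes the sentinel into `XContentMapValues.extractValue`:

```java
import java.util.HashMap;
import java.util.Map;

// extract() returns the sentinel for an explicit null, a plain null for a
// missing key, and the value otherwise, matching the three-way distinction
// the bulk inference filter needs for update requests.
public class ExplicitNullSentinel {
    private static final Object EXPLICIT_NULL = new Object();

    static Object extract(Map<String, Object> doc, String key) {
        if (doc.containsKey(key) && doc.get(key) == null) {
            return EXPLICIT_NULL; // field present, explicitly set to null
        }
        return doc.get(key); // real value, or null when the field is absent
    }

    public static void main(String[] args) {
        Map<String, Object> doc = new HashMap<>();
        doc.put("cleared_field", null);

        System.out.println(extract(doc, "cleared_field") == EXPLICIT_NULL); // true
        System.out.println(extract(doc, "missing_field") == null); // true
    }
}
```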