
Commit

… into refreshIntervalUpdate
Shubh Sahu committed Apr 11, 2024
2 parents 29cb727 + 7103e56 commit 1718d64
Showing 49 changed files with 2,226 additions and 455 deletions.
3 changes: 2 additions & 1 deletion CHANGELOG.md
@@ -18,11 +18,12 @@ The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/),
- Reject resize index requests (i.e., split, shrink, and clone) while DocRep to SegRep migration is in progress. ([#12686](https://github.com/opensearch-project/OpenSearch/pull/12686))
- [Remote Store] Add capability of doing refresh as determined by the translog ([#12992](https://github.com/opensearch-project/OpenSearch/pull/12992))
- Add support for more than one protocol for transport ([#12967](https://github.com/opensearch-project/OpenSearch/pull/12967))
- Add changes for overriding remote store and replication settings during snapshot restore. ([#11868](https://github.com/opensearch-project/OpenSearch/pull/11868))

### Dependencies
- Bump `org.apache.commons:commons-configuration2` from 2.10.0 to 2.10.1 ([#12896](https://github.com/opensearch-project/OpenSearch/pull/12896))
- Bump `asm` from 9.6 to 9.7 ([#12908](https://github.com/opensearch-project/OpenSearch/pull/12908))
- Bump `net.minidev:json-smart` from 2.5.0 to 2.5.1 ([#12893](https://github.com/opensearch-project/OpenSearch/pull/12893))
- Bump `net.minidev:json-smart` from 2.5.0 to 2.5.1 ([#12893](https://github.com/opensearch-project/OpenSearch/pull/12893), [#13117](https://github.com/opensearch-project/OpenSearch/pull/13117))
- Bump `netty` from 4.1.107.Final to 4.1.108.Final ([#12924](https://github.com/opensearch-project/OpenSearch/pull/12924))
- Bump `commons-io:commons-io` from 2.15.1 to 2.16.0 ([#12996](https://github.com/opensearch-project/OpenSearch/pull/12996), [#12998](https://github.com/opensearch-project/OpenSearch/pull/12998), [#12999](https://github.com/opensearch-project/OpenSearch/pull/12999))
- Bump `org.apache.commons:commons-compress` from 1.24.0 to 1.26.1 ([#12627](https://github.com/opensearch-project/OpenSearch/pull/12627))
@@ -54,15 +54,19 @@
import org.opensearch.action.search.SearchScrollRequest;
import org.opensearch.client.core.CountRequest;
import org.opensearch.client.core.CountResponse;
import org.opensearch.common.geo.ShapeRelation;
import org.opensearch.common.unit.TimeValue;
import org.opensearch.common.xcontent.XContentFactory;
import org.opensearch.core.common.bytes.BytesReference;
import org.opensearch.core.rest.RestStatus;
import org.opensearch.core.xcontent.MediaTypeRegistry;
import org.opensearch.core.xcontent.XContentBuilder;
import org.opensearch.geometry.Rectangle;
import org.opensearch.index.query.GeoShapeQueryBuilder;
import org.opensearch.index.query.MatchQueryBuilder;
import org.opensearch.index.query.QueryBuilder;
import org.opensearch.index.query.QueryBuilders;
import org.opensearch.index.query.RangeQueryBuilder;
import org.opensearch.index.query.ScriptQueryBuilder;
import org.opensearch.index.query.TermsQueryBuilder;
import org.opensearch.join.aggregations.Children;
@@ -102,6 +106,8 @@
import org.opensearch.search.suggest.Suggest;
import org.opensearch.search.suggest.SuggestBuilder;
import org.opensearch.search.suggest.phrase.PhraseSuggestionBuilder;
import org.joda.time.DateTime;
import org.joda.time.DateTimeZone;
import org.hamcrest.Matchers;
import org.junit.Before;

@@ -116,6 +122,7 @@
import java.util.concurrent.TimeUnit;

import static org.opensearch.common.xcontent.XContentFactory.jsonBuilder;
import static org.opensearch.index.query.QueryBuilders.geoShapeQuery;
import static org.opensearch.test.hamcrest.OpenSearchAssertions.assertToXContentEquivalent;
import static org.hamcrest.Matchers.arrayContaining;
import static org.hamcrest.Matchers.both;
@@ -764,6 +771,228 @@ public void testSearchWithWeirdScriptFields() throws Exception {
}
}

public void testSearchWithDerivedFields() throws Exception {
// Only exercises the DerivedField definition exposed via SearchSourceBuilder.derivedField();
// the full derived-field functionality is not covered here.
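// Each block below follows the same pattern: define a derived field of a given type whose
// script emits one or more values per document, (optionally) query on that field, then
// fetch it back and assert on the returned values.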
Request doc = new Request("PUT", "test/_doc/1");
doc.setJsonEntity("{\"field\":\"value\"}");
client().performRequest(doc);
client().performRequest(new Request("POST", "/test/_refresh"));
// Keyword field
{
SearchRequest searchRequest = new SearchRequest("test").source(
SearchSourceBuilder.searchSource()
.derivedField("result", "keyword", new Script("emit(params._source[\"field\"])"))
.fetchField("result")
.query(new TermsQueryBuilder("result", "value"))
);
SearchResponse searchResponse = execute(searchRequest, highLevelClient()::search, highLevelClient()::searchAsync);
SearchHit searchHit = searchResponse.getHits().getAt(0);
List<Object> values = searchHit.getFields().get("result").getValues();
assertNotNull(values);
assertEquals(1, values.size());
assertEquals("value", values.get(0));

// multi valued
searchRequest = new SearchRequest("test").source(
SearchSourceBuilder.searchSource()
.derivedField(
"result",
"keyword",
new Script("emit(params._source[\"field\"]);emit(params._source[\"field\"] + \"_2\")")
)
.query(new TermsQueryBuilder("result", "value_2"))
.fetchField("result")
);
searchResponse = execute(searchRequest, highLevelClient()::search, highLevelClient()::searchAsync);
searchHit = searchResponse.getHits().getAt(0);
values = searchHit.getFields().get("result").getValues();
assertNotNull(values);
assertEquals(2, values.size());
assertEquals("value", values.get(0));
assertEquals("value_2", values.get(1));
}
// Boolean field
{
SearchRequest searchRequest = new SearchRequest("test").source(
SearchSourceBuilder.searchSource()
.derivedField("result", "boolean", new Script("emit(((String)params._source[\"field\"]).equals(\"value\"))"))
.query(new TermsQueryBuilder("result", "true"))
.fetchField("result")
);
SearchResponse searchResponse = execute(searchRequest, highLevelClient()::search, highLevelClient()::searchAsync);
SearchHit searchHit = searchResponse.getHits().getAt(0);
List<Object> values = searchHit.getFields().get("result").getValues();
assertNotNull(values);
assertEquals(1, values.size());
assertEquals(true, values.get(0));
}
// Long field
{
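// Boundary values (Long.MAX_VALUE / Long.MIN_VALUE) are used to verify that long
// precision survives the round trip through the derived field.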
SearchRequest searchRequest = new SearchRequest("test").source(
SearchSourceBuilder.searchSource()
.derivedField("result", "long", new Script("emit(Long.MAX_VALUE)"))
.query(new RangeQueryBuilder("result").from(Long.MAX_VALUE - 1).to(Long.MAX_VALUE))
.fetchField("result")
);

SearchResponse searchResponse = execute(searchRequest, highLevelClient()::search, highLevelClient()::searchAsync);
SearchHit searchHit = searchResponse.getHits().getAt(0);
List<Object> values = searchHit.getFields().get("result").getValues();
assertNotNull(values);
assertEquals(1, values.size());
assertEquals(Long.MAX_VALUE, values.get(0));

// multi-valued
searchRequest = new SearchRequest("test").source(
SearchSourceBuilder.searchSource()
.derivedField("result", "long", new Script("emit(Long.MAX_VALUE); emit(Long.MIN_VALUE);"))
.query(new RangeQueryBuilder("result").from(Long.MIN_VALUE).to(Long.MIN_VALUE + 1))
.fetchField("result")
);

searchResponse = execute(searchRequest, highLevelClient()::search, highLevelClient()::searchAsync);
searchHit = searchResponse.getHits().getAt(0);
values = searchHit.getFields().get("result").getValues();
assertNotNull(values);
assertEquals(2, values.size());
assertEquals(Long.MAX_VALUE, values.get(0));
assertEquals(Long.MIN_VALUE, values.get(1));
}
// Double field
{
SearchRequest searchRequest = new SearchRequest("test").source(
SearchSourceBuilder.searchSource()
.derivedField("result", "double", new Script("emit(Double.MAX_VALUE)"))
.query(new RangeQueryBuilder("result").from(Double.MAX_VALUE - 1).to(Double.MAX_VALUE))
.fetchField("result")
);
SearchResponse searchResponse = execute(searchRequest, highLevelClient()::search, highLevelClient()::searchAsync);
SearchHit searchHit = searchResponse.getHits().getAt(0);
List<Object> values = searchHit.getFields().get("result").getValues();
assertNotNull(values);
assertEquals(1, values.size());
assertEquals(Double.MAX_VALUE, values.get(0));

// multi-valued
searchRequest = new SearchRequest("test").source(
SearchSourceBuilder.searchSource()
.derivedField("result", "double", new Script("emit(Double.MAX_VALUE); emit(Double.MIN_VALUE);"))
.query(new RangeQueryBuilder("result").from(Double.MIN_VALUE).to(Double.MIN_VALUE + 1))
.fetchField("result")
);

searchResponse = execute(searchRequest, highLevelClient()::search, highLevelClient()::searchAsync);
searchHit = searchResponse.getHits().getAt(0);
values = searchHit.getFields().get("result").getValues();
assertNotNull(values);
assertEquals(2, values.size());
assertEquals(Double.MAX_VALUE, values.get(0));
assertEquals(Double.MIN_VALUE, values.get(1));
}
// Date field
{
DateTime date1 = new DateTime(1990, 12, 29, 0, 0, DateTimeZone.UTC);
DateTime date2 = new DateTime(1990, 12, 30, 0, 0, DateTimeZone.UTC);
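// The script emits epoch milliseconds, but the fetched values come back as ISO-8601 date
// strings, which is why the assertions below compare against date1.toString()/date2.toString().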
SearchRequest searchRequest = new SearchRequest("test").source(
SearchSourceBuilder.searchSource()
.derivedField("result", "date", new Script("emit(" + date1.getMillis() + "L)"))
.query(new RangeQueryBuilder("result").from(date1.toString()).to(date2.toString()))
.fetchField("result")
);

SearchResponse searchResponse = execute(searchRequest, highLevelClient()::search, highLevelClient()::searchAsync);
SearchHit searchHit = searchResponse.getHits().getAt(0);
List<Object> values = searchHit.getFields().get("result").getValues();
assertNotNull(values);
assertEquals(1, values.size());
assertEquals(date1.toString(), values.get(0));

// multi-valued
searchRequest = new SearchRequest("test").source(
SearchSourceBuilder.searchSource()
.derivedField("result", "date", new Script("emit(" + date1.getMillis() + "L); " + "emit(" + date2.getMillis() + "L)"))
.query(new RangeQueryBuilder("result").from(date1.toString()).to(date2.toString()))
.fetchField("result")
);

searchResponse = execute(searchRequest, highLevelClient()::search, highLevelClient()::searchAsync);
searchHit = searchResponse.getHits().getAt(0);
values = searchHit.getFields().get("result").getValues();
assertNotNull(values);
assertEquals(2, values.size());
assertEquals(date1.toString(), values.get(0));
assertEquals(date2.toString(), values.get(1));
}
// Geo field
{
GeoShapeQueryBuilder qb = geoShapeQuery("result", new Rectangle(-35, 35, 35, -35));
qb.relation(ShapeRelation.INTERSECTS);
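// The query rectangle covers the points emitted below (lat/lon 10/20 and 20/30), so the
// INTERSECTS geo_shape query matches the document.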
SearchRequest searchRequest = new SearchRequest("test").source(
SearchSourceBuilder.searchSource()
.derivedField("result", "geo_point", new Script("emit(10.0, 20.0)"))
.query(qb)
.fetchField("result")
);

SearchResponse searchResponse = execute(searchRequest, highLevelClient()::search, highLevelClient()::searchAsync);
SearchHit searchHit = searchResponse.getHits().getAt(0);
List<Object> values = searchHit.getFields().get("result").getValues();
assertNotNull(values);
assertEquals(1, values.size());
assertEquals(10.0, ((HashMap) values.get(0)).get("lat"));
assertEquals(20.0, ((HashMap) values.get(0)).get("lon"));

// multi-valued
searchRequest = new SearchRequest("test").source(
SearchSourceBuilder.searchSource()
.derivedField("result", "geo_point", new Script("emit(10.0, 20.0); emit(20.0, 30.0);"))
.query(qb)
.fetchField("result")
);

searchResponse = execute(searchRequest, highLevelClient()::search, highLevelClient()::searchAsync);
searchHit = searchResponse.getHits().getAt(0);
values = searchHit.getFields().get("result").getValues();
assertNotNull(values);
assertEquals(2, values.size());
assertEquals(10.0, ((HashMap) values.get(0)).get("lat"));
assertEquals(20.0, ((HashMap) values.get(0)).get("lon"));
assertEquals(20.0, ((HashMap) values.get(1)).get("lat"));
assertEquals(30.0, ((HashMap) values.get(1)).get("lon"));
}
// IP field
{
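// No query on the derived IP field here; it is only fetched and asserted on.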
SearchRequest searchRequest = new SearchRequest("test").source(
SearchSourceBuilder.searchSource().derivedField("result", "ip", new Script("emit(\"10.0.0.1\")")).fetchField("result")
);

SearchResponse searchResponse = execute(searchRequest, highLevelClient()::search, highLevelClient()::searchAsync);
SearchHit searchHit = searchResponse.getHits().getAt(0);
List<Object> values = searchHit.getFields().get("result").getValues();
assertNotNull(values);
assertEquals(1, values.size());
assertEquals("10.0.0.1", values.get(0));

// multi-valued
searchRequest = new SearchRequest("test").source(
SearchSourceBuilder.searchSource()
.derivedField("result", "ip", new Script("emit(\"10.0.0.1\"); emit(\"10.0.0.2\");"))
.fetchField("result")
);

searchResponse = execute(searchRequest, highLevelClient()::search, highLevelClient()::searchAsync);
searchHit = searchResponse.getHits().getAt(0);
values = searchHit.getFields().get("result").getValues();
assertNotNull(values);
assertEquals(2, values.size());
assertEquals("10.0.0.1", values.get(0));
assertEquals("10.0.0.2", values.get(1));

}

}

public void testSearchScroll() throws Exception {
for (int i = 0; i < 100; i++) {
XContentBuilder builder = jsonBuilder().startObject().field("field", i).endObject();
2 changes: 1 addition & 1 deletion plugins/repository-hdfs/build.gradle
@@ -81,7 +81,7 @@ dependencies {
api 'javax.servlet:servlet-api:2.5'
api "org.slf4j:slf4j-api:${versions.slf4j}"
api "org.apache.logging.log4j:log4j-slf4j-impl:${versions.log4j}"
api 'net.minidev:json-smart:2.5.0'
api 'net.minidev:json-smart:2.5.1'
api "io.netty:netty-all:${versions.netty}"
implementation "com.fasterxml.woodstox:woodstox-core:${versions.woodstox}"
implementation 'org.codehaus.woodstox:stax2-api:4.2.2'
1 change: 0 additions & 1 deletion plugins/repository-hdfs/licenses/json-smart-2.5.0.jar.sha1

This file was deleted.

1 change: 1 addition & 0 deletions plugins/repository-hdfs/licenses/json-smart-2.5.1.jar.sha1
@@ -0,0 +1 @@
4c11d2808d009132dfbbf947ebf37de6bf266c8e
@@ -54,7 +54,7 @@ public static Map<ShardId, ShardAttributes> prepareRequestMap(String[] indices,
);
for (int shardIdNum = 0; shardIdNum < primaryShardCount; shardIdNum++) {
final ShardId shardId = new ShardId(index, shardIdNum);
shardIdShardAttributesMap.put(shardId, new ShardAttributes(shardId, customDataPath));
shardIdShardAttributesMap.put(shardId, new ShardAttributes(customDataPath));
}
}
return shardIdShardAttributesMap;