import java.util.concurrent.CountDownLatch;
import java.util.concurrent.atomic.AtomicReference;

+import static org.elasticsearch.index.mapper.MapperService.INDEX_MAPPING_DEPTH_LIMIT_SETTING;
import static org.elasticsearch.index.mapper.MapperService.INDEX_MAPPING_TOTAL_FIELDS_LIMIT_SETTING;
import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked;
import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertSearchHits;
@@ -131,9 +132,11 @@ public void run() {
    }

    public void testPreflightCheckAvoidsMaster() throws InterruptedException {
-        createIndex("index", Settings.builder().put(INDEX_MAPPING_TOTAL_FIELDS_LIMIT_SETTING.getKey(), 2).build());
+        // can't use INDEX_MAPPING_TOTAL_FIELDS_LIMIT_SETTING as a check here, as that is already checked at parse time,
+        // see testTotalFieldsLimitForDynamicMappingsUpdateCheckedAtDocumentParseTime
+        createIndex("index", Settings.builder().put(INDEX_MAPPING_DEPTH_LIMIT_SETTING.getKey(), 2).build());
        ensureGreen("index");
-        client().prepareIndex("index").setId("1").setSource("field1", "value1").get();
+        client().prepareIndex("index").setId("1").setSource("field1", Map.of("field2", "value1")).get();

        final CountDownLatch masterBlockedLatch = new CountDownLatch(1);
        final CountDownLatch indexingCompletedLatch = new CountDownLatch(1);
@@ -154,11 +157,49 @@ public void onFailure(String source, Exception e) {
        });

        masterBlockedLatch.await();
-        final IndexRequestBuilder indexRequestBuilder = client().prepareIndex("index").setId("2").setSource("field2", "value2");
+        final IndexRequestBuilder indexRequestBuilder = client().prepareIndex("index").setId("2").setSource("field1",
+            Map.of("field3", Map.of("field4", "value2")));
        try {
            assertThat(
                expectThrows(IllegalArgumentException.class, () -> indexRequestBuilder.get(TimeValue.timeValueSeconds(10))).getMessage(),
-                Matchers.containsString("Limit of total fields [2] has been exceeded"));
+                Matchers.containsString("Limit of mapping depth [2] has been exceeded due to object field [field1.field3]"));
+        } finally {
+            indexingCompletedLatch.countDown();
+        }
+    }
+
+    public void testTotalFieldsLimitForDynamicMappingsUpdateCheckedAtDocumentParseTime() throws InterruptedException {
+        createIndex("index", Settings.builder().put(INDEX_MAPPING_TOTAL_FIELDS_LIMIT_SETTING.getKey(), 2).build());
+        ensureGreen("index");
+        client().prepareIndex("index").setId("1").setSource("field1", "value1").get();
+
+        final CountDownLatch masterBlockedLatch = new CountDownLatch(1);
+        final CountDownLatch indexingCompletedLatch = new CountDownLatch(1);
+
+        internalCluster().getInstance(ClusterService.class, internalCluster().getMasterName()).submitStateUpdateTask("block-state-updates",
+            new ClusterStateUpdateTask() {
+                @Override
+                public ClusterState execute(ClusterState currentState) throws Exception {
+                    masterBlockedLatch.countDown();
+                    indexingCompletedLatch.await();
+                    return currentState;
+                }
+
+                @Override
+                public void onFailure(String source, Exception e) {
+                    throw new AssertionError("unexpected", e);
+                }
+            });
+
+        masterBlockedLatch.await();
+        final IndexRequestBuilder indexRequestBuilder = client().prepareIndex("index").setId("2").setSource("field2", "value2");
+        try {
+            Exception e = expectThrows(MapperParsingException.class, () -> indexRequestBuilder.get(TimeValue.timeValueSeconds(10)));
+            assertThat(e.getMessage(),
+                Matchers.containsString("failed to parse"));
+            assertThat(e.getCause(), instanceOf(IllegalArgumentException.class));
+            assertThat(e.getCause().getMessage(),
+                Matchers.containsString("Limit of total fields [2] has been exceeded while adding new fields [1]"));
        } finally {
            indexingCompletedLatch.countDown();
        }
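
A note on why document "2" in the reworked testPreflightCheckAvoidsMaster now trips the depth limit rather than the field limit: the sketch below is an illustration only (plain Java, not MapperService's actual depth check), counting object nesting in the two Map-based sources built above under the assumption that the root object counts as the first level. With index.mapping.depth.limit set to 2, {"field1": {"field2": ...}} sits at depth 2 and is accepted, while {"field1": {"field3": {"field4": ...}}} reaches depth 3, which lines up with the asserted message "Limit of mapping depth [2] has been exceeded due to object field [field1.field3]".

// Illustration only: not Elasticsearch code, just a depth count over the Map-based
// sources used in the tests above, assuming the root object is level 1.
import java.util.Map;

public class MappingDepthSketch {

    // Depth of a source: the enclosing object is one level, each nested object adds one more.
    static int depth(Object value) {
        if (value instanceof Map) {
            int deepest = 0;
            for (Object child : ((Map<?, ?>) value).values()) {
                deepest = Math.max(deepest, depth(child));
            }
            return 1 + deepest;
        }
        return 0; // leaf values (strings, numbers, ...) add no depth
    }

    public static void main(String[] args) {
        // Document "1": {"field1": {"field2": "value1"}} -> 2, allowed with a limit of 2
        System.out.println(depth(Map.of("field1", Map.of("field2", "value1"))));
        // Document "2": {"field1": {"field3": {"field4": "value2"}}} -> 3, exceeds the limit of 2
        System.out.println(depth(Map.of("field1", Map.of("field3", Map.of("field4", "value2")))));
    }
}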