Skip to content

Commit dbaf97f

Browse files
authored
ecj: enable redundantSpecificationOfTypeArguments and fix issues (#14965)
One of the easier ECJ checks to enable and fix: although a bit cosmetic, it is also the least risky. Makes the code look a little less Java 5 and a little more Java 24, especially if we address other ECJ issues.
1 parent 500efeb commit dbaf97f

File tree

141 files changed

+224
-238
lines changed

Some content is hidden

Large commits have some content hidden by default. Use the search box below for content that may be hidden.

141 files changed

+224
-238
lines changed

gradle/validation/ecj-lint/ecj.javadocs.prefs

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -95,7 +95,7 @@ org.eclipse.jdt.core.compiler.problem.potentiallyUnclosedCloseable=ignore
9595
org.eclipse.jdt.core.compiler.problem.rawTypeReference=error
9696
org.eclipse.jdt.core.compiler.problem.redundantNullAnnotation=error
9797
org.eclipse.jdt.core.compiler.problem.redundantNullCheck=error
98-
org.eclipse.jdt.core.compiler.problem.redundantSpecificationOfTypeArguments=ignore
98+
org.eclipse.jdt.core.compiler.problem.redundantSpecificationOfTypeArguments=error
9999
org.eclipse.jdt.core.compiler.problem.redundantSuperinterface=error
100100
org.eclipse.jdt.core.compiler.problem.reportMethodCanBePotentiallyStatic=ignore
101101
org.eclipse.jdt.core.compiler.problem.reportMethodCanBeStatic=ignore

lucene/analysis/common/src/java/org/apache/lucene/analysis/morph/ViterbiNBest.java

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -61,7 +61,7 @@ protected ViterbiNBest(
6161
protected final void backtraceNBest(final Position endPosData, final boolean useEOS)
6262
throws IOException {
6363
if (lattice == null) {
64-
lattice = new Lattice<U>();
64+
lattice = new Lattice<>();
6565
}
6666

6767
final int endPos = endPosData.getPos();

lucene/analysis/common/src/java/org/apache/lucene/analysis/synonym/SynonymGraphFilter.java

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -118,7 +118,7 @@ public final class SynonymGraphFilter extends TokenFilter {
118118
private int lookaheadNextWrite;
119119

120120
private RollingBuffer<BufferedInputToken> lookahead =
121-
new RollingBuffer<BufferedInputToken>() {
121+
new RollingBuffer<>() {
122122
@Override
123123
protected BufferedInputToken newInstance() {
124124
return new BufferedInputToken();

lucene/analysis/common/src/test/org/apache/lucene/analysis/core/TestFactories.java

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -157,7 +157,7 @@ private AbstractAnalysisFactory initialize(Class<? extends AbstractAnalysisFacto
157157

158158
// some silly classes just so we can use checkRandomData
159159
private TokenizerFactory assertingTokenizer =
160-
new TokenizerFactory(new HashMap<String, String>()) {
160+
new TokenizerFactory(new HashMap<>()) {
161161
@Override
162162
public MockTokenizer create(AttributeFactory factory) {
163163
return new MockTokenizer(factory);

lucene/analysis/common/src/test/org/apache/lucene/analysis/minhash/TestMinHashFilter.java

Lines changed: 7 additions & 7 deletions
Original file line numberDiff line numberDiff line change
@@ -75,8 +75,8 @@ public void testHashOrder() {
7575
assertTrue(MinHashFilter.isLessThanUnsigned(1L, 2L));
7676
assertTrue(MinHashFilter.isLessThanUnsigned(Long.MAX_VALUE, Long.MIN_VALUE));
7777

78-
FixedSizeTreeSet<LongPair> minSet = new FixedSizeTreeSet<LongPair>(500);
79-
HashSet<LongPair> unadded = new HashSet<LongPair>();
78+
FixedSizeTreeSet<LongPair> minSet = new FixedSizeTreeSet<>(500);
79+
HashSet<LongPair> unadded = new HashSet<>();
8080
for (int i = 0; i < 100; i++) {
8181
LongPair hash = new LongPair();
8282
MinHashFilter.murmurhash3_x64_128(MinHashFilter.getBytes(i), 0, 4, 0, hash);
@@ -100,9 +100,9 @@ public void testHashOrder() {
100100
}
101101

102102
public void testCollisions() {
103-
HashSet<LongPair> collisionDetection = new HashSet<LongPair>();
104-
HashSet<LongPair> unadded = new HashSet<LongPair>();
105-
FixedSizeTreeSet<LongPair> minSet = new FixedSizeTreeSet<LongPair>(500);
103+
HashSet<LongPair> collisionDetection = new HashSet<>();
104+
HashSet<LongPair> unadded = new HashSet<>();
105+
FixedSizeTreeSet<LongPair> minSet = new FixedSizeTreeSet<>(500);
106106
int numElements = TEST_NIGHTLY ? 1000000 : 10000;
107107
for (int i = 0; i < numElements; i++) {
108108
LongPair hash = new LongPair();
@@ -139,8 +139,8 @@ public void testCollisions() {
139139

140140
@Test
141141
public void testHashNotRepeated() {
142-
FixedSizeTreeSet<LongPair> minSet = new FixedSizeTreeSet<LongPair>(500);
143-
HashSet<LongPair> unadded = new HashSet<LongPair>();
142+
FixedSizeTreeSet<LongPair> minSet = new FixedSizeTreeSet<>(500);
143+
HashSet<LongPair> unadded = new HashSet<>();
144144
for (int i = 0; i < 10000; i++) {
145145
LongPair hash = new LongPair();
146146
MinHashFilter.murmurhash3_x64_128(MinHashFilter.getBytes(i), 0, 4, 0, hash);

lucene/analysis/icu/src/test/org/apache/lucene/analysis/icu/TestICUFoldingFilterFactory.java

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -28,15 +28,15 @@ public class TestICUFoldingFilterFactory extends BaseTokenStreamTestCase {
2828
/** basic tests to ensure the folding is working */
2929
public void test() throws Exception {
3030
Reader reader = new StringReader("Résumé");
31-
ICUFoldingFilterFactory factory = new ICUFoldingFilterFactory(new HashMap<String, String>());
31+
ICUFoldingFilterFactory factory = new ICUFoldingFilterFactory(new HashMap<>());
3232
TokenStream stream = whitespaceMockTokenizer(reader);
3333
stream = factory.create(stream);
3434
assertTokenStreamContents(stream, new String[] {"resume"});
3535
}
3636

3737
/** test to ensure the filter parameter is working */
3838
public void testFilter() throws Exception {
39-
HashMap<String, String> args = new HashMap<String, String>();
39+
HashMap<String, String> args = new HashMap<>();
4040
args.put("filter", "[^ö]");
4141
ICUFoldingFilterFactory factory = new ICUFoldingFilterFactory(args);
4242

lucene/analysis/icu/src/test/org/apache/lucene/analysis/icu/TestICUNormalizer2CharFilterFactory.java

Lines changed: 1 addition & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -29,8 +29,7 @@ public class TestICUNormalizer2CharFilterFactory extends BaseTokenStreamTestCase
2929
/** Test nfkc_cf defaults */
3030
public void testDefaults() throws Exception {
3131
Reader reader = new StringReader("This is a Test");
32-
ICUNormalizer2CharFilterFactory factory =
33-
new ICUNormalizer2CharFilterFactory(new HashMap<String, String>());
32+
ICUNormalizer2CharFilterFactory factory = new ICUNormalizer2CharFilterFactory(new HashMap<>());
3433
reader = factory.create(reader);
3534
TokenStream stream = whitespaceMockTokenizer(reader);
3635
assertTokenStreamContents(stream, new String[] {"this", "is", "a", "test"});

lucene/analysis/icu/src/test/org/apache/lucene/analysis/icu/TestICUNormalizer2FilterFactory.java

Lines changed: 1 addition & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -29,8 +29,7 @@ public class TestICUNormalizer2FilterFactory extends BaseTokenStreamTestCase {
2929
/** Test nfkc_cf defaults */
3030
public void testDefaults() throws Exception {
3131
Reader reader = new StringReader("This is a Test");
32-
ICUNormalizer2FilterFactory factory =
33-
new ICUNormalizer2FilterFactory(new HashMap<String, String>());
32+
ICUNormalizer2FilterFactory factory = new ICUNormalizer2FilterFactory(new HashMap<>());
3433
TokenStream stream = whitespaceMockTokenizer(reader);
3534
stream = factory.create(stream);
3635
assertTokenStreamContents(stream, new String[] {"this", "is", "a", "test"});

lucene/analysis/icu/src/test/org/apache/lucene/analysis/icu/segmentation/TestICUTokenizerFactory.java

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -28,7 +28,7 @@
2828
public class TestICUTokenizerFactory extends BaseTokenStreamTestCase {
2929
public void testMixedText() throws Exception {
3030
Reader reader = new StringReader("การที่ได้ต้องแสดงว่างานดี This is a test ກວ່າດອກ");
31-
ICUTokenizerFactory factory = new ICUTokenizerFactory(new HashMap<String, String>());
31+
ICUTokenizerFactory factory = new ICUTokenizerFactory(new HashMap<>());
3232
factory.inform(new ClasspathResourceLoader(getClass()));
3333
Tokenizer stream = factory.create(newAttributeFactory());
3434
stream.setReader(reader);

lucene/analysis/kuromoji/src/test/org/apache/lucene/analysis/ja/TestFactories.java

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -155,7 +155,7 @@ private AbstractAnalysisFactory initialize(Class<? extends AbstractAnalysisFacto
155155

156156
// some silly classes just so we can use checkRandomData
157157
private TokenizerFactory assertingTokenizer =
158-
new TokenizerFactory(new HashMap<String, String>()) {
158+
new TokenizerFactory(new HashMap<>()) {
159159
@Override
160160
public MockTokenizer create(AttributeFactory factory) {
161161
return new MockTokenizer(factory);

0 commit comments

Comments
 (0)