1 change: 1 addition & 0 deletions CHANGELOG.md
@@ -33,6 +33,7 @@ The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/),
- Grant access to testclusters dir for tests ([#19085](https://github.com/opensearch-project/OpenSearch/issues/19085))
- Fix assertion error when collapsing search results with concurrent segment search enabled ([#19053](https://github.com/opensearch-project/OpenSearch/pull/19053))
- Fix skip_unavailable setting changing to default during node drop issue ([#18766](https://github.com/opensearch-project/OpenSearch/pull/18766))
- S1144: Unused "private" methods should be removed ([#18918](https://github.com/opensearch-project/OpenSearch/pull/18918))

### Dependencies
- Bump `com.netflix.nebula.ospackage-base` from 12.0.0 to 12.1.0 ([#19019](https://github.com/opensearch-project/OpenSearch/pull/19019))
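
For context on the S1144 entry above: SonarQube rule S1144 flags `private` methods that nothing in their declaring class still calls, since such methods are unreachable dead code. A minimal sketch of what the rule reports (the class and methods here are hypothetical, not taken from this PR):

```java
public class ReportGenerator {

    public String render() {
        return header() + "body\n";
    }

    // Reached from render(), so S1144 is satisfied.
    private String header() {
        return "=== report ===\n";
    }

    // Never called from anywhere in the class: S1144 flags this for removal.
    private String footer() {
        return "=== end ===\n";
    }
}
```

Every hunk below applies that cleanup across the codebase: each unused private method is deleted, together with any import or Javadoc that only it needed.
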
@@ -71,16 +71,6 @@ public TaskListRequest taskType(TaskStatus... taskStatus) {
return this;
}

/**
* Update worker node to filter with in the request
* @param workerNode WorkerNode
* @return ListTaskRequest
*/
private TaskListRequest workerNode(WorkerNode workerNode) {
this.workerNodes = workerNode;
return this;
}

/**
* Update page number to start with when fetching the list of tasks
* @param startPageNumber startPageNumber
@@ -2665,17 +2665,6 @@ private TieredSpilloverCache<String, String> getTieredSpilloverCache(
return builder.build();
}

private TieredSpilloverCache<String, String> initializeTieredSpilloverCache(
int keyValueSize,
int diskCacheSize,
RemovalListener<ICacheKey<String>, String> removalListener,
Settings settings,
long diskDeliberateDelay

) {
return initializeTieredSpilloverCache(keyValueSize, diskCacheSize, removalListener, settings, diskDeliberateDelay, null, 256);
}

private TieredSpilloverCache<String, String> initializeTieredSpilloverCache(
int keyValueSize,
int diskCacheSize,
@@ -14,7 +14,6 @@
import javassist.ClassPool;
import javassist.CtClass;
import javassist.CtConstructor;
import javassist.CtMethod;

/**
* Java agent for bytecode injection of chaos testing
@@ -75,27 +74,6 @@ private boolean shouldTransform(String className) {
return className.startsWith("org/opensearch/arrow/flight/transport/Flight");
}

private void transformFlightTransport(CtClass ctClass) throws Exception {
CtMethod method = ctClass.getDeclaredMethod("openConnection");
method.insertBefore("org.opensearch.arrow.flight.chaos.ChaosScenario.injectChaos();");
}

private void transformFlightTransportChannel(CtClass ctClass) throws Exception {
CtMethod sendBatch = ctClass.getDeclaredMethod("sendResponseBatch");
sendBatch.insertBefore("org.opensearch.arrow.flight.chaos.ChaosScenario.injectChaos();");

CtMethod complete = ctClass.getDeclaredMethod("completeStream");
complete.insertBefore("org.opensearch.arrow.flight.chaos.ChaosScenario.injectChaos();");
}

private void transformFlightTransportResponse(CtClass ctClass) throws Exception {
CtMethod nextResponse = ctClass.getDeclaredMethod("nextResponse");
nextResponse.insertBefore("org.opensearch.arrow.flight.chaos.ChaosScenario.injectChaos();");

// CtMethod close = ctClass.getDeclaredMethod("close");
// close.insertBefore("org.opensearch.arrow.flight.chaos.ChaosInterceptor.beforeResponseClose();");
}

private void transformFlightServerChannelWithDelay(CtClass ctClass) throws Exception {
CtConstructor[] ctr = ctClass.getConstructors();
ctr[0].insertBefore("org.opensearch.arrow.flight.chaos.ChaosScenario.injectChaos();");
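
The three transformers removed above all used the same javassist recipe: look up the target `CtClass`, fetch a declared method, and prepend the chaos hook with `insertBefore`. A minimal sketch of that recipe inside a standalone `premain` agent, assuming the same `ChaosScenario.injectChaos()` hook (the agent class and target method below are illustrative, not this module's actual wiring):

```java
import java.lang.instrument.ClassFileTransformer;
import java.lang.instrument.Instrumentation;
import java.security.ProtectionDomain;

import javassist.ClassPool;
import javassist.CtClass;
import javassist.CtMethod;

public final class ChaosAgentSketch {

    public static void premain(String agentArgs, Instrumentation inst) {
        inst.addTransformer(new ClassFileTransformer() {
            @Override
            public byte[] transform(ClassLoader loader, String className, Class<?> redefined,
                                    ProtectionDomain domain, byte[] classfileBuffer) {
                // JVM-internal names use '/', e.g. org/opensearch/arrow/flight/transport/FlightTransport
                if (className == null || !className.startsWith("org/opensearch/arrow/flight/transport/Flight")) {
                    return null; // null tells the JVM to keep the class unchanged
                }
                try {
                    // A production agent would feed classfileBuffer into the pool;
                    // the default pool is enough for a sketch.
                    CtClass ctClass = ClassPool.getDefault().get(className.replace('/', '.'));
                    CtMethod method = ctClass.getDeclaredMethod("openConnection");
                    method.insertBefore("org.opensearch.arrow.flight.chaos.ChaosScenario.injectChaos();");
                    byte[] transformed = ctClass.toBytecode();
                    ctClass.detach(); // drop the cached CtClass once the bytecode is emitted
                    return transformed;
                } catch (Exception e) {
                    return null; // any failure falls back to the original bytecode
                }
            }
        });
    }
}
```

The deleted variants each pointed this recipe at a different Flight method (`openConnection`, `sendResponseBatch`, `completeStream`, `nextResponse`); `transformFlightServerChannelWithDelay`, kept just above, instruments a constructor via `CtConstructor.insertBefore` instead.
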
@@ -8,7 +8,6 @@

package org.opensearch.arrow.flight.chaos;

import org.opensearch.transport.stream.StreamErrorCode;
import org.opensearch.transport.stream.StreamException;

import java.util.concurrent.atomic.AtomicBoolean;
@@ -67,20 +66,6 @@ public static void injectChaos() throws StreamException {
}
}

private static void simulateUnresponsiveness() throws StreamException {
try {
Thread.sleep(timeoutDelayMs);
} catch (InterruptedException e) {
Thread.currentThread().interrupt();
}
throw new StreamException(StreamErrorCode.TIMED_OUT, "Client unresponsive");
}

private static void simulateClientNodeDeath() throws StreamException {
// Simulate node death followed by recovery
throw new StreamException(StreamErrorCode.UNAVAILABLE, "Client node death - connection lost");
}

private static void simulateLongRunningOperation() throws StreamException {
try {
Thread.sleep(timeoutDelayMs);
@@ -35,7 +35,6 @@
import java.util.Map;
import java.util.Objects;
import java.util.Optional;
import java.util.Set;
import java.util.concurrent.CompletableFuture;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ExecutorService;
@@ -247,10 +246,6 @@ private static boolean isValidNode(DiscoveryNode node) {
return node != null && !node.getVersion().before(MIN_SUPPORTED_VERSION) && FeatureFlags.isEnabled(ARROW_STREAMS_SETTING);
}

private Set<String> getCurrentClusterNodes() {
return Objects.requireNonNull(clientConfig.clusterService).state().nodes().getNodes().keySet();
}

@VisibleForTesting
Map<String, ClientHolder> getFlightClients() {
return flightClients;
@@ -1264,35 +1264,6 @@ PersistentCacheManager buildCacheManager() {
}
}

private EhcacheDiskCache.Builder<String, String> createDummyBuilder(String storagePath) throws IOException {
Settings settings = Settings.builder().build();
MockRemovalListener<String, String> removalListener = new MockRemovalListener<>();
ToLongBiFunction<ICacheKey<String>, String> weigher = getWeigher();
try (NodeEnvironment env = newNodeEnvironment(settings)) {
if (storagePath == null || storagePath.isBlank()) {
storagePath = env.nodePaths()[0].path.toString() + "/request_cache";
}
return (EhcacheDiskCache.Builder<String, String>) new EhcacheDiskCache.Builder<String, String>().setThreadPoolAlias(
"ehcacheTest"
)
.setIsEventListenerModeSync(true)
.setStoragePath(storagePath)
.setKeyType(String.class)
.setValueType(String.class)
.setKeySerializer(new StringSerializer())
.setDiskCacheAlias("test1")
.setValueSerializer(new StringSerializer())
.setDimensionNames(List.of(dimensionName))
.setCacheType(CacheType.INDICES_REQUEST_CACHE)
.setSettings(settings)
.setExpireAfterAccess(TimeValue.MAX_VALUE)
.setMaximumWeightInBytes(CACHE_SIZE_IN_BYTES)
.setRemovalListener(removalListener)
.setWeigher(weigher)
.setStatsTrackingEnabled(false);
}
}

private List<String> getRandomDimensions(List<String> dimensionNames) {
Random rand = Randomness.get();
int bound = 3;
@@ -503,12 +503,6 @@ public void testDiskMonitorResetLastRuntimeMilliSecOnlyInFirstCall() throws Exce
}, 30L, TimeUnit.SECONDS);
}

private String populateNode(final String dataNodeName) throws Exception {
final String indexName = randomAlphaOfLength(10).toLowerCase(Locale.ROOT);
createAndPopulateIndex(indexName, dataNodeName);
return indexName;
}

private void createIndex(String indexName, String nodeName, boolean isWarmIndex) throws Exception {
final Settings.Builder indexSettingBuilder = Settings.builder()
.put(IndexMetadata.SETTING_NUMBER_OF_REPLICAS, 0)
@@ -83,10 +83,6 @@ protected Settings nodeSettings(int nodeOrdinal) {
return Settings.builder().put(super.nodeSettings(nodeOrdinal)).put(REMOTE_CLUSTER_STATE_ENABLED_SETTING.getKey(), true).build();
}

private void addNewNodes(int dataNodeCount, int clusterManagerNodeCount) {
internalCluster().startNodes(dataNodeCount + clusterManagerNodeCount);
}

private Map<String, Long> initialTestSetup(int shardCount, int replicaCount, int dataNodeCount, int clusterManagerNodeCount) {
prepareCluster(clusterManagerNodeCount, dataNodeCount, INDEX_NAME, replicaCount, shardCount);
Map<String, Long> indexStats = indexData(1, false, INDEX_NAME);
@@ -146,19 +146,6 @@ public NodesInfoRequest removeMetric(String metric) {
return this;
}

/**
* Helper method for adding and removing metrics. Used when deserializing
* a NodesInfoRequest from an ordered list of booleans.
*
* @param addMetric Whether or not to include a metric.
* @param metricName Name of the metric to include or remove.
*/
private void optionallyAddMetric(boolean addMetric, String metricName) {
if (addMetric) {
requestedMetrics.add(metricName);
}
}

@Override
public void writeTo(StreamOutput out) throws IOException {
super.writeTo(out);
@@ -172,17 +172,6 @@ public NodesStatsRequest removeMetric(String metric) {
return this;
}

/**
* Helper method for adding metrics during deserialization.
* @param includeMetric Whether or not to include a metric.
* @param metricName Name of the metric to add.
*/
private void optionallyAddMetric(boolean includeMetric, String metricName) {
if (includeMetric) {
requestedMetrics.add(metricName);
}
}

@Override
public void writeTo(StreamOutput out) throws IOException {
super.writeTo(out);
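
The `optionallyAddMetric` helpers removed from both `NodesInfoRequest` and `NodesStatsRequest` above date from an older wire format in which, per their Javadoc, each metric traveled as one boolean in a fixed order; the current `writeTo` serializes the requested metric names directly, which left the helpers without callers. A sketch of the ordered-boolean style of deserialization the Javadoc describes (plain `DataInputStream` stands in for the real `StreamInput`, and the metric order is illustrative):

```java
import java.io.DataInputStream;
import java.io.IOException;
import java.util.LinkedHashSet;
import java.util.Set;

final class MetricFlagsReader {

    // The fixed on-wire order that serializer and deserializer must agree on.
    private static final String[] METRIC_ORDER = { "os", "process", "jvm", "thread_pool", "transport", "http" };

    static Set<String> readMetrics(DataInputStream in) throws IOException {
        Set<String> requested = new LinkedHashSet<>();
        for (String metric : METRIC_ORDER) {
            optionallyAddMetric(requested, in.readBoolean(), metric);
        }
        return requested;
    }

    // Mirrors the deleted helper: add the metric only when its flag was set.
    private static void optionallyAddMetric(Set<String> metrics, boolean include, String name) {
        if (include) {
            metrics.add(name);
        }
    }
}
```
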
@@ -32,7 +32,6 @@

package org.opensearch.cluster.action.index;

import org.opensearch.OpenSearchException;
import org.opensearch.action.admin.indices.mapping.put.AutoPutMappingAction;
import org.opensearch.action.admin.indices.mapping.put.PutMappingRequest;
import org.opensearch.action.support.clustermanager.ClusterManagerNodeRequest;
@@ -44,7 +43,6 @@
import org.opensearch.common.settings.Settings;
import org.opensearch.common.unit.TimeValue;
import org.opensearch.common.util.concurrent.RunOnce;
import org.opensearch.common.util.concurrent.UncategorizedExecutionException;
import org.opensearch.core.action.ActionListener;
import org.opensearch.core.index.Index;
import org.opensearch.core.xcontent.MediaTypeRegistry;
@@ -150,19 +148,6 @@ protected void sendUpdateMapping(Index index, Mapping mappingUpdate, ActionListe
);
}

// todo: this explicit unwrap should not be necessary, but is until guessRootCause is fixed to allow wrapped non-es exception.
private static Exception unwrapException(Exception cause) {
return cause instanceof OpenSearchException ? unwrapEsException((OpenSearchException) cause) : cause;
}

private static RuntimeException unwrapEsException(OpenSearchException esEx) {
Throwable root = esEx.unwrapCause();
if (root instanceof RuntimeException) {
return (RuntimeException) root;
}
return new UncategorizedExecutionException("Failed execution", root);
}

/**
* An adjustable semaphore
*
@@ -13,7 +13,6 @@
import org.apache.logging.log4j.message.ParameterizedMessage;
import org.apache.lucene.codecs.CodecUtil;
import org.apache.lucene.index.CorruptIndexException;
import org.apache.lucene.store.ChecksumIndexInput;
import org.apache.lucene.store.IndexInput;
import org.apache.lucene.store.IndexOutput;

@@ -97,15 +96,6 @@ private int checkHeader(IndexInput indexInput) throws IOException {
return CodecUtil.checkHeader(indexInput, this.codec, minVersion, this.currentVersion);
}

/**
* Reads footer from file input stream containing checksum.
* The {@link IndexInput#getFilePointer()} should be at the footer start position.
* @param indexInput file input stream
*/
private void checkFooter(ChecksumIndexInput indexInput) throws IOException {
CodecUtil.checkFooter(indexInput);
}

/**
* Writes header with {@code this.codec} and {@code this.currentVersion} to the file output stream
* @param indexOutput file output stream
12 changes: 0 additions & 12 deletions server/src/main/java/org/opensearch/env/NodeEnvironment.java
@@ -677,18 +677,6 @@ public void deleteIndexDirectoryUnderLock(Index index, IndexSettings indexSettin
}
}

private void deleteIndexFileCacheDirectory(Index index) {
final Path indexCachePath = fileCacheNodePath().fileCachePath.resolve(index.getUUID());
logger.trace("deleting index {} file cache directory, path: [{}]", index, indexCachePath);
if (Files.exists(indexCachePath)) {
try {
IOUtils.rm(indexCachePath);
} catch (IOException e) {
logger.error(() -> new ParameterizedMessage("Failed to delete cache path for index {}", index), e);
}
}
}

/**
* Tries to lock all local shards for the given index. If any of the shard locks can't be acquired
* a {@link ShardLockObtainFailedException} is thrown and all previously acquired locks are released.
@@ -43,7 +43,6 @@
import org.opensearch.indices.store.ShardAttributes;
import org.opensearch.indices.store.TransportNodesListShardStoreMetadataBatch;
import org.opensearch.indices.store.TransportNodesListShardStoreMetadataBatch.NodeStoreFilesMetadata;
import org.opensearch.indices.store.TransportNodesListShardStoreMetadataHelper;
import org.opensearch.indices.store.TransportNodesListShardStoreMetadataHelper.StoreFilesMetadata;
import org.opensearch.telemetry.metrics.noop.NoopMetricsRegistry;

@@ -838,17 +837,6 @@ protected void removeShard(ShardId shardId) {
this.batchInfo.remove(shardId);
}

private TransportNodesListShardStoreMetadataBatch.NodeStoreFilesMetadata buildEmptyReplicaShardResponse() {
return new TransportNodesListShardStoreMetadataBatch.NodeStoreFilesMetadata(
new TransportNodesListShardStoreMetadataHelper.StoreFilesMetadata(
null,
Store.MetadataSnapshot.EMPTY,
Collections.emptyList()
),
null
);
}

private void removeFromBatch(ShardRouting shard) {
removeShard(shard.shardId());
clearShardFromCache(shard.shardId());
@@ -1948,13 +1948,6 @@ public void setRemoteStateReadTimeout(TimeValue remoteStateReadTimeout) {
this.remoteStateReadTimeout = remoteStateReadTimeout;
}

private BlobStoreTransferService getBlobStoreTransferService() {
if (blobStoreTransferService == null) {
blobStoreTransferService = new BlobStoreTransferService(getBlobStore(), threadpool);
}
return blobStoreTransferService;
}

Set<String> getAllClusterUUIDs(String clusterName) throws IOException {
Map<String, BlobContainer> clusterUUIDMetadata = clusterUUIDContainer(blobStoreRepository, clusterName).children();
if (clusterUUIDMetadata == null) {
@@ -1963,22 +1956,6 @@ Set<String> getAllClusterUUIDs(String clusterName) throws IOException {
return Collections.unmodifiableSet(clusterUUIDMetadata.keySet());
}

private Map<String, ClusterMetadataManifest> getLatestManifestForAllClusterUUIDs(String clusterName, Set<String> clusterUUIDs) {
Map<String, ClusterMetadataManifest> manifestsByClusterUUID = new HashMap<>();
for (String clusterUUID : clusterUUIDs) {
try {
Optional<ClusterMetadataManifest> manifest = getLatestClusterMetadataManifest(clusterName, clusterUUID);
manifest.ifPresent(clusterMetadataManifest -> manifestsByClusterUUID.put(clusterUUID, clusterMetadataManifest));
} catch (Exception e) {
throw new IllegalStateException(
String.format(Locale.ROOT, "Exception in fetching manifest for clusterUUID: %s", clusterUUID),
e
);
}
}
return manifestsByClusterUUID;
}

/**
* This method creates a valid cluster UUID chain.
*
@@ -1102,18 +1102,6 @@ private StarTreeDocument createAggregatedDocs(InMemoryTreeNode node) throws IOEx
return aggregatedStarTreeDocument;
}

/**
* Handles the dimension of date time field type
*
* @param fieldName name of the field
* @param val value of the field
* @return returns the converted dimension of the field to a particular granularity
*/
private long handleDateDimension(final String fieldName, final long val) {
// TODO: handle timestamp granularity
return val;
}

public void close() throws IOException {

}