
Skip to content

Commit

Permalink
Migrate from Trove to Hppc.
Browse files Browse the repository at this point in the history
  • Loading branch information
martijnvg committed Oct 3, 2013
1 parent 373e64b commit 088e05b
Show file tree
Hide file tree
Showing 98 changed files with 1,124 additions and 1,141 deletions.
12 changes: 6 additions & 6 deletions pom.xml
Original file line number Diff line number Diff line change
Expand Up @@ -170,9 +170,9 @@
</dependency>

<dependency>
<groupId>net.sf.trove4j</groupId>
<artifactId>trove4j</artifactId>
<version>3.0.3</version>
<groupId>com.carrotsearch</groupId>
<artifactId>hppc</artifactId>
<version>0.5.2</version>
</dependency>

<dependency>
Expand Down Expand Up @@ -439,7 +439,7 @@
<artifactSet>
<includes>
<include>com.google.guava:guava</include>
<include>net.sf.trove4j:trove4j</include>
<include>com.carrotsearch:hppc</include>
<include>org.mvel:mvel2</include>
<include>com.fasterxml.jackson.core:jackson-core</include>
<include>com.fasterxml.jackson.dataformat:jackson-dataformat-smile</include>
Expand All @@ -455,8 +455,8 @@
<shadedPattern>org.elasticsearch.common</shadedPattern>
</relocation>
<relocation>
<pattern>gnu.trove</pattern>
<shadedPattern>org.elasticsearch.common.trove</shadedPattern>
<pattern>com.carrotsearch.hppc</pattern>
<shadedPattern>org.elasticsearch.common.hppc</shadedPattern>
</relocation>
<relocation>
<pattern>jsr166y</pattern>
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -755,7 +755,10 @@ protected Query getBooleanQuery(List<BooleanClause> clauses, boolean disableCoor

private void applyBoost(String field, Query q) {
if (settings.boosts() != null) {
float boost = settings.boosts().get(field);
float boost = 1f;
if (settings.boosts().containsKey(field)) {
boost = settings.boosts().lget();
}
q.setBoost(boost);
}
}
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -19,7 +19,7 @@

package org.apache.lucene.queryparser.classic;

import gnu.trove.map.hash.TObjectFloatHashMap;
import com.carrotsearch.hppc.ObjectFloatOpenHashMap;
import org.apache.lucene.analysis.Analyzer;
import org.apache.lucene.search.FuzzyQuery;
import org.apache.lucene.search.MultiTermQuery;
Expand Down Expand Up @@ -63,7 +63,7 @@ public class QueryParserSettings {

List<String> fields = null;
Collection<String> queryTypes = null;
TObjectFloatHashMap<String> boosts = null;
ObjectFloatOpenHashMap<String> boosts = null;
float tieBreaker = 0.0f;
boolean useDisMax = true;

Expand Down Expand Up @@ -272,11 +272,11 @@ public void queryTypes(Collection<String> queryTypes) {
this.queryTypes = queryTypes;
}

public TObjectFloatHashMap<String> boosts() {
public ObjectFloatOpenHashMap<String> boosts() {
return boosts;
}

public void boosts(TObjectFloatHashMap<String> boosts) {
public void boosts(ObjectFloatOpenHashMap<String> boosts) {
this.boosts = boosts;
}

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -18,7 +18,7 @@
*/
package org.apache.lucene.search.suggest.analyzing;

import gnu.trove.map.hash.TObjectIntHashMap;
import com.carrotsearch.hppc.ObjectIntOpenHashMap;
import org.apache.lucene.analysis.Analyzer;
import org.apache.lucene.analysis.TokenStream;
import org.apache.lucene.analysis.TokenStreamToAutomaton;
Expand All @@ -33,6 +33,7 @@
import org.apache.lucene.util.fst.FST.BytesReader;
import org.apache.lucene.util.fst.PairOutputs.Pair;
import org.apache.lucene.util.fst.Util.MinResult;
import org.elasticsearch.common.hppc.HppcMaps;

import java.io.File;
import java.io.IOException;
Expand Down Expand Up @@ -917,7 +918,7 @@ public static class XBuilder {
private BytesRef analyzed = new BytesRef();
private final SurfaceFormAndPayload[] surfaceFormsAndPayload;
private int count;
private TObjectIntHashMap<BytesRef> seenSurfaceForms = new TObjectIntHashMap<BytesRef>(256, 0.75f, -1);
private ObjectIntOpenHashMap<BytesRef> seenSurfaceForms = HppcMaps.Object.Integer.ensureNoNullKeys(256, 0.75f);

public XBuilder(int maxSurfaceFormsPerAnalyzedForm, boolean hasPayloads) {
this.outputs = new PairOutputs<Long, BytesRef>(PositiveIntOutputs.getSingleton(), ByteSequenceOutputs.getSingleton());
Expand Down Expand Up @@ -969,7 +970,8 @@ public void addSurface(BytesRef surface, BytesRef payload, long cost) throws IOE
return;
}
BytesRef surfaceCopy;
if (count > 0 && (surfaceIndex = seenSurfaceForms.get(surface)) >= 0) {
if (count > 0 && seenSurfaceForms.containsKey(surface)) {
surfaceIndex = seenSurfaceForms.lget();
SurfaceFormAndPayload surfaceFormAndPayload = surfaceFormsAndPayload[surfaceIndex];
if (encodedWeight >= surfaceFormAndPayload.weight) {
return;
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -19,7 +19,6 @@

package org.elasticsearch.action.deletebyquery;

import gnu.trove.set.hash.THashSet;
import org.elasticsearch.action.ActionRequestValidationException;
import org.elasticsearch.action.support.replication.IndexReplicationOperationRequest;
import org.elasticsearch.common.Nullable;
Expand All @@ -30,6 +29,7 @@
import org.elasticsearch.common.unit.TimeValue;

import java.io.IOException;
import java.util.HashSet;
import java.util.Set;

import static org.elasticsearch.action.ValidateActions.addValidationError;
Expand Down Expand Up @@ -102,7 +102,7 @@ public void readFrom(StreamInput in) throws IOException {
}
int routingSize = in.readVInt();
if (routingSize > 0) {
routing = new THashSet<String>(routingSize);
routing = new HashSet<String>(routingSize);
for (int i = 0; i < routingSize; i++) {
routing.add(in.readString());
}
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -19,7 +19,6 @@

package org.elasticsearch.action.deletebyquery;

import gnu.trove.set.hash.THashSet;
import org.elasticsearch.action.ActionRequestValidationException;
import org.elasticsearch.action.support.replication.ShardReplicationOperationRequest;
import org.elasticsearch.common.Nullable;
Expand All @@ -31,6 +30,7 @@

import java.io.IOException;
import java.util.Arrays;
import java.util.HashSet;
import java.util.Set;

import static org.elasticsearch.action.ValidateActions.addValidationError;
Expand Down Expand Up @@ -101,7 +101,7 @@ public void readFrom(StreamInput in) throws IOException {
types = in.readStringArray();
int routingSize = in.readVInt();
if (routingSize > 0) {
routing = new THashSet<String>(routingSize);
routing = new HashSet<String>(routingSize);
for (int i = 0; i < routingSize; i++) {
routing.add(in.readString());
}
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -19,8 +19,8 @@

package org.elasticsearch.action.get;

import gnu.trove.list.array.TIntArrayList;
import gnu.trove.list.array.TLongArrayList;
import com.carrotsearch.hppc.IntArrayList;
import com.carrotsearch.hppc.LongArrayList;
import org.elasticsearch.action.support.single.shard.SingleShardOperationRequest;
import org.elasticsearch.common.Nullable;
import org.elasticsearch.common.io.stream.StreamInput;
Expand All @@ -39,11 +39,11 @@ public class MultiGetShardRequest extends SingleShardOperationRequest<MultiGetSh
Boolean realtime;
boolean refresh;

TIntArrayList locations;
IntArrayList locations;
List<String> types;
List<String> ids;
List<String[]> fields;
TLongArrayList versions;
LongArrayList versions;
List<VersionType> versionTypes;
List<FetchSourceContext> fetchSourceContexts;

Expand All @@ -54,11 +54,11 @@ public class MultiGetShardRequest extends SingleShardOperationRequest<MultiGetSh
MultiGetShardRequest(String index, int shardId) {
super(index);
this.shardId = shardId;
locations = new TIntArrayList();
locations = new IntArrayList();
types = new ArrayList<String>();
ids = new ArrayList<String>();
fields = new ArrayList<String[]>();
versions = new TLongArrayList();
versions = new LongArrayList();
versionTypes = new ArrayList<VersionType>();
fetchSourceContexts = new ArrayList<FetchSourceContext>();
}
Expand Down Expand Up @@ -113,11 +113,11 @@ public void add(int location, @Nullable String type, String id, String[] fields,
public void readFrom(StreamInput in) throws IOException {
super.readFrom(in);
int size = in.readVInt();
locations = new TIntArrayList(size);
locations = new IntArrayList(size);
types = new ArrayList<String>(size);
ids = new ArrayList<String>(size);
fields = new ArrayList<String[]>(size);
versions = new TLongArrayList(size);
versions = new LongArrayList(size);
versionTypes = new ArrayList<VersionType>(size);
fetchSourceContexts = new ArrayList<FetchSourceContext>(size);
for (int i = 0; i < size; i++) {
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -19,7 +19,7 @@

package org.elasticsearch.action.get;

import gnu.trove.list.array.TIntArrayList;
import com.carrotsearch.hppc.IntArrayList;
import org.elasticsearch.action.ActionResponse;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
Expand All @@ -30,12 +30,12 @@

public class MultiGetShardResponse extends ActionResponse {

TIntArrayList locations;
IntArrayList locations;
List<GetResponse> responses;
List<MultiGetResponse.Failure> failures;

MultiGetShardResponse() {
locations = new TIntArrayList();
locations = new IntArrayList();
responses = new ArrayList<GetResponse>();
failures = new ArrayList<MultiGetResponse.Failure>();
}
Expand All @@ -56,7 +56,7 @@ public void add(int location, MultiGetResponse.Failure failure) {
public void readFrom(StreamInput in) throws IOException {
super.readFrom(in);
int size = in.readVInt();
locations = new TIntArrayList(size);
locations = new IntArrayList(size);
responses = new ArrayList<GetResponse>(size);
failures = new ArrayList<MultiGetResponse.Failure>(size);
for (int i = 0; i < size; i++) {
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -19,7 +19,7 @@

package org.elasticsearch.action.percolate;

import gnu.trove.list.array.TIntArrayList;
import com.carrotsearch.hppc.IntArrayList;
import org.elasticsearch.ExceptionsHelper;
import org.elasticsearch.action.ActionListener;
import org.elasticsearch.action.get.*;
Expand Down Expand Up @@ -76,7 +76,7 @@ protected void doExecute(final MultiPercolateRequest request, final ActionListen
final List<Object> percolateRequests = new ArrayList<Object>(request.requests().size());
// Can have a mixture of percolate requests. (normal percolate requests & percolate existing doc),
// so we need to keep track for what percolate request we had a get request
final TIntArrayList getRequestSlots = new TIntArrayList();
final IntArrayList getRequestSlots = new IntArrayList();
List<GetRequest> existingDocsRequests = new ArrayList<GetRequest>();
for (int slot = 0; slot < request.requests().size(); slot++) {
PercolateRequest percolateRequest = request.requests().get(slot);
Expand Down Expand Up @@ -139,7 +139,7 @@ private class ASyncAction {
final Map<ShardId, TransportShardMultiPercolateAction.Request> requestsByShard;
final List<Object> percolateRequests;

final Map<ShardId, TIntArrayList> shardToSlots;
final Map<ShardId, IntArrayList> shardToSlots;
final AtomicInteger expectedOperations;
final AtomicArray<Object> reducedResponses;
final AtomicReferenceArray<AtomicInteger> expectedOperationsPerItem;
Expand All @@ -155,7 +155,7 @@ private class ASyncAction {
// Resolving concrete indices and routing and grouping the requests by shard
requestsByShard = new HashMap<ShardId, TransportShardMultiPercolateAction.Request>();
// Keep track what slots belong to what shard, in case a request to a shard fails on all copies
shardToSlots = new HashMap<ShardId, TIntArrayList>();
shardToSlots = new HashMap<ShardId, IntArrayList>();
int expectedResults = 0;
for (int slot = 0; slot < percolateRequests.size(); slot++) {
Object element = percolateRequests.get(slot);
Expand All @@ -180,9 +180,9 @@ private class ASyncAction {
logger.trace("Adding shard[{}] percolate request for item[{}]", shardId, slot);
requests.add(new TransportShardMultiPercolateAction.Request.Item(slot, new PercolateShardRequest(shardId, percolateRequest)));

TIntArrayList items = shardToSlots.get(shardId);
IntArrayList items = shardToSlots.get(shardId);
if (items == null) {
shardToSlots.put(shardId, items = new TIntArrayList());
shardToSlots.put(shardId, items = new IntArrayList());
}
items.add(slot);
}
Expand Down Expand Up @@ -257,7 +257,7 @@ void onShardResponse(ShardId shardId, TransportShardMultiPercolateAction.Respons
void onShardFailure(ShardId shardId, Throwable e) {
logger.debug("{} Shard multi percolate failure", e, shardId);
try {
TIntArrayList slots = shardToSlots.get(shardId);
IntArrayList slots = shardToSlots.get(shardId);
for (int i = 0; i < slots.size(); i++) {
int slot = slots.get(i);
AtomicReferenceArray shardResults = responsesByItemAndShard.get(slot);
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -19,6 +19,7 @@

package org.elasticsearch.action.search.type;

import com.carrotsearch.hppc.IntArrayList;
import org.elasticsearch.action.ActionListener;
import org.elasticsearch.action.search.ReduceSearchPhaseException;
import org.elasticsearch.action.search.SearchOperationThreading;
Expand All @@ -28,7 +29,6 @@
import org.elasticsearch.cluster.node.DiscoveryNode;
import org.elasticsearch.common.inject.Inject;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.trove.ExtTIntArrayList;
import org.elasticsearch.common.util.concurrent.AtomicArray;
import org.elasticsearch.search.SearchShardTarget;
import org.elasticsearch.search.action.SearchServiceListener;
Expand Down Expand Up @@ -66,13 +66,13 @@ private class AsyncAction extends BaseAsyncAction<DfsSearchResult> {

final AtomicArray<QuerySearchResult> queryResults;
final AtomicArray<FetchSearchResult> fetchResults;
final AtomicArray<ExtTIntArrayList> docIdsToLoad;
final AtomicArray<IntArrayList> docIdsToLoad;

private AsyncAction(SearchRequest request, ActionListener<SearchResponse> listener) {
super(request, listener);
queryResults = new AtomicArray<QuerySearchResult>(firstResults.length());
fetchResults = new AtomicArray<FetchSearchResult>(firstResults.length());
docIdsToLoad = new AtomicArray<ExtTIntArrayList>(firstResults.length());
docIdsToLoad = new AtomicArray<IntArrayList>(firstResults.length());
}

@Override
Expand Down Expand Up @@ -192,7 +192,7 @@ void innerExecuteFetchPhase() {

final AtomicInteger counter = new AtomicInteger(docIdsToLoad.asList().size());
int localOperations = 0;
for (final AtomicArray.Entry<ExtTIntArrayList> entry : docIdsToLoad.asList()) {
for (final AtomicArray.Entry<IntArrayList> entry : docIdsToLoad.asList()) {
QuerySearchResult queryResult = queryResults.get(entry.index);
DiscoveryNode node = nodes.get(queryResult.shardTarget().nodeId());
if (node.id().equals(nodes.localNodeId())) {
Expand All @@ -208,7 +208,7 @@ void innerExecuteFetchPhase() {
threadPool.executor(ThreadPool.Names.SEARCH).execute(new Runnable() {
@Override
public void run() {
for (final AtomicArray.Entry<ExtTIntArrayList> entry : docIdsToLoad.asList()) {
for (final AtomicArray.Entry<IntArrayList> entry : docIdsToLoad.asList()) {
QuerySearchResult queryResult = queryResults.get(entry.index);
DiscoveryNode node = nodes.get(queryResult.shardTarget().nodeId());
if (node.id().equals(nodes.localNodeId())) {
Expand All @@ -220,7 +220,7 @@ public void run() {
});
} else {
boolean localAsync = request.operationThreading() == SearchOperationThreading.THREAD_PER_SHARD;
for (final AtomicArray.Entry<ExtTIntArrayList> entry : docIdsToLoad.asList()) {
for (final AtomicArray.Entry<IntArrayList> entry : docIdsToLoad.asList()) {
final QuerySearchResult queryResult = queryResults.get(entry.index);
final DiscoveryNode node = nodes.get(queryResult.shardTarget().nodeId());
if (node.id().equals(nodes.localNodeId())) {
Expand Down
Loading

0 comments on commit 088e05b

Please sign in to comment.