Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Add a version check in tests #527

Merged
merged 1 commit into from
Feb 23, 2018
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
Original file line number Diff line number Diff line change
Expand Up @@ -53,6 +53,7 @@
import java.io.InputStream;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Properties;
import java.util.TimeZone;

public class FsCrawlerUtil {
Expand Down Expand Up @@ -316,6 +317,21 @@ public static void copyResourceFile(String source, Path target) throws IOExcepti
FileUtils.copyInputStreamToFile(resource, target.toFile());
}

/**
 * Read a property file from the class loader.
 * @param resource Resource name (classpath-relative, e.g. {@code "fscrawler.properties"})
 * @return The properties loaded
 * @throws RuntimeException if the resource cannot be found on the classpath or cannot be read
 */
public static Properties readPropertiesFromClassLoader(String resource) {
    Properties properties = new Properties();
    // try-with-resources: the original leaked the stream returned by getResourceAsStream
    try (InputStream stream = FsCrawlerUtil.class.getClassLoader().getResourceAsStream(resource)) {
        if (stream == null) {
            // getResourceAsStream returns null (no exception) for a missing resource;
            // fail with an explicit message instead of an opaque NullPointerException
            throw new RuntimeException("Resource [" + resource + "] not found in the classpath");
        }
        properties.load(stream);
    } catch (IOException e) {
        throw new RuntimeException(e);
    }
    return properties;
}

/**
* Copy files from a source to a target
* under a _default sub directory.
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -19,20 +19,16 @@

package fr.pilato.elasticsearch.crawler.fs.framework;

import java.io.IOException;
import java.util.Properties;

import static fr.pilato.elasticsearch.crawler.fs.framework.FsCrawlerUtil.readPropertiesFromClassLoader;

public class Version {
private final static String FSCRAWLER_PROPERTIES = "fscrawler.properties";
public static final Properties properties;

static {
properties = new Properties();
try {
properties.load(Version.class.getClassLoader().getResourceAsStream(FSCRAWLER_PROPERTIES));
} catch (IOException e) {
throw new RuntimeException(e);
}
properties = readPropertiesFromClassLoader(FSCRAWLER_PROPERTIES);
}

public static String getVersion() {
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -137,8 +137,6 @@ FsCrawlerImpl startCrawler(final String jobName, Fs fs, Elasticsearch elasticsea
throws Exception {
logger.info(" --> starting crawler [{}]", jobName);

// TODO do this rarely() createIndex(jobName);

crawler = new FsCrawlerImpl(
metadataDir,
FsSettings.builder(jobName).setElasticsearch(elasticsearch).setFs(fs).setServer(server).setRest(rest).build(),
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -72,6 +72,7 @@

import static fr.pilato.elasticsearch.crawler.fs.framework.FsCrawlerUtil.copyDefaultResources;
import static fr.pilato.elasticsearch.crawler.fs.framework.FsCrawlerUtil.copyDirs;
import static fr.pilato.elasticsearch.crawler.fs.framework.FsCrawlerUtil.readPropertiesFromClassLoader;
import static fr.pilato.elasticsearch.crawler.fs.framework.FsCrawlerUtil.unzip;
import static org.hamcrest.MatcherAssert.assertThat;
import static org.hamcrest.Matchers.containsString;
Expand Down Expand Up @@ -235,8 +236,7 @@ public static void startElasticsearchRestClient() throws IOException {
} catch (ConnectException e) {
staticLogger.debug("No local node running. We need to start a Docker instance.");
// We start an elasticsearch Docker instance
Properties props = new Properties();
props.load(AbstractITCase.class.getResourceAsStream("/elasticsearch.version.properties"));
Properties props = readPropertiesFromClassLoader("elasticsearch.version.properties");
container = ElasticsearchContainerSingleton.getInstance(props.getProperty("version"),
testClusterUser, testClusterPass);

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -34,7 +34,9 @@
import java.io.IOException;
import java.util.Collection;
import java.util.Collections;
import java.util.Properties;

import static fr.pilato.elasticsearch.crawler.fs.framework.FsCrawlerUtil.readPropertiesFromClassLoader;
import static org.hamcrest.MatcherAssert.assertThat;
import static org.hamcrest.Matchers.is;
import static org.hamcrest.Matchers.iterableWithSize;
Expand Down Expand Up @@ -140,7 +142,11 @@ public void testFindVersion() throws IOException {
Version version = elasticsearchClient.info().getVersion();
logger.info("Current elasticsearch version: [{}]", version);

// TODO if we store in a property file the elasticsearch version we are running tests against we can add some assertions
// If we did not use an external URL but the docker instance we can test for sure that the version is the expected one
if (System.getProperty("tests.cluster.host") == null) {
Properties properties = readPropertiesFromClassLoader("elasticsearch.version.properties");
assertThat(version.toString(), is(properties.getProperty("version")));
}
}

@Test
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -21,11 +21,10 @@

import fr.pilato.elasticsearch.crawler.fs.test.framework.AbstractFSCrawlerTestCase;

import java.io.IOException;
import java.io.InputStream;

public class DocParserTestCase extends AbstractFSCrawlerTestCase {
InputStream getBinaryContent(String filename) throws IOException {
InputStream getBinaryContent(String filename) {
return getClass().getResourceAsStream("/documents/" + filename);
}
}