Commit fabeff4e authored by Luc Maisonobe

Added FilesListCrawler to load files from an explicit list.

fixes #592
parent 4c4b2d02
......@@ -21,6 +21,9 @@
</properties>
<body>
<release version="10.1" date="TBD" description="TBD">
<action dev="luc" type="add" issue="592">
Added FilesListCrawler to load files from an explicit list.
</action>
<action dev="evan" type="fix" issue="583">
Fix AbsoluteDate.compareTo() for future/past infinity.
</action>
......
......@@ -43,7 +43,7 @@ import org.orekit.errors.OrekitException;
* @see FilesListCrawler
* @author Luc Maisonobe
*/
public abstract class ListCrawler<T> implements DataProvider {
public abstract class AbstractListCrawler<T> implements DataProvider {
/** Inputs list. */
private final List<T> inputs;
......@@ -52,7 +52,7 @@ public abstract class ListCrawler<T> implements DataProvider {
* @param inputs list of inputs (may be empty if {@link #addInput(Object) addInput} is called later)
*/
@SafeVarargs
protected ListCrawler(final T... inputs) {
protected AbstractListCrawler(final T... inputs) {
this.inputs = Arrays.stream(inputs).collect(Collectors.toList());
}
......
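
For illustration, a hypothetical subclass of the renamed AbstractListCrawler for java.nio.file.Path inputs could look like the sketch below. The four protected hooks are inferred from the FilesListCrawler overrides later in this commit, so the exact signatures are assumptions rather than part of the change itself.

import java.io.IOException;
import java.io.InputStream;
import java.nio.file.Files;
import java.nio.file.Path;
import org.orekit.data.AbstractListCrawler;
import org.orekit.data.ZipJarCrawler;

// Hypothetical sketch, not part of this commit: a list crawler for Path inputs,
// assuming AbstractListCrawler declares the same hooks that FilesListCrawler overrides.
public class PathsListCrawler extends AbstractListCrawler<Path> {

    /** Build a crawler for an explicit list of paths.
     * @param inputs list of input paths
     */
    public PathsListCrawler(final Path... inputs) {
        super(inputs);
    }

    /** {@inheritDoc} */
    @Override
    protected String getCompleteName(final Path input) {
        return input.toString();
    }

    /** {@inheritDoc} */
    @Override
    protected String getBaseName(final Path input) {
        return input.getFileName().toString();
    }

    /** {@inheritDoc} */
    @Override
    protected ZipJarCrawler getZipJarCrawler(final Path input) {
        return new ZipJarCrawler(input.toFile());
    }

    /** {@inheritDoc} */
    @Override
    protected InputStream getStream(final Path input) throws IOException {
        return Files.newInputStream(input);
    }
}
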
/* Copyright 2002-2019 CS Systèmes d'Information
* Licensed to CS Systèmes d'Information (CS) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* CS licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.orekit.data;
import java.io.File;
import java.io.FileInputStream;
import java.io.IOException;
import java.io.InputStream;
/** Provider for data files in an explicit list.
* <p>
* Zip archive entries are supported recursively.
* </p>
* <p>
* This is a simple application of the <code>visitor</code> design pattern for
* list browsing.
* </p>
* @see DataProvidersManager
* @since 10.1
* @author Luc Maisonobe
*/
public class FilesListCrawler extends AbstractListCrawler<File> {
/** Build a crawler for an explicit list of files.
* @param inputs list of input files
*/
public FilesListCrawler(final File... inputs) {
super(inputs);
}
/** {@inheritDoc} */
@Override
protected String getCompleteName(final File input) {
return input.getPath();
}
/** {@inheritDoc} */
@Override
protected String getBaseName(final File input) {
return input.getName();
}
/** {@inheritDoc} */
@Override
protected ZipJarCrawler getZipJarCrawler(final File input) {
return new ZipJarCrawler(input);
}
/** {@inheritDoc} */
@Override
protected InputStream getStream(final File input) throws IOException {
return new FileInputStream(input);
}
}
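
As a usage illustration (hypothetical, not part of this commit), the new provider can be registered with the DataProvidersManager so Orekit reads its data from an explicit list of files; all paths below are placeholders.

import java.io.File;
import org.orekit.data.DataProvidersManager;
import org.orekit.data.FilesListCrawler;

// Hypothetical usage sketch: feed Orekit from an explicit list of files.
public class ExplicitFilesSetup {
    public static void main(final String[] args) {
        final FilesListCrawler crawler =
                new FilesListCrawler(new File("/data/orekit/UTC-TAI.history"),
                                     new File("/data/orekit/de405-ephemerides/unxp0000.405"));
        // the inputs list may also start empty and be completed later
        crawler.addInput(new File("/data/orekit/bulletinb_IAU2000-216.txt"));
        DataProvidersManager.getInstance().addProvider(crawler);
    }
}
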
......@@ -68,7 +68,7 @@ import org.orekit.errors.OrekitException;
* @see DataProvidersManager
* @author Luc Maisonobe
*/
public class NetworkCrawler extends ListCrawler<URL> {
public class NetworkCrawler extends AbstractListCrawler<URL> {
/** Connection timeout (milliseconds). */
private int timeout;
......
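
A hypothetical usage sketch of NetworkCrawler after the rename to AbstractListCrawler<URL>; the URL is a placeholder and the timeout value is arbitrary.

import java.net.MalformedURLException;
import java.net.URL;
import org.orekit.data.DataProvidersManager;
import org.orekit.data.NetworkCrawler;

// Hypothetical usage sketch: NetworkCrawler is now an AbstractListCrawler<URL>,
// so it can also start empty and receive inputs through addInput.
public class RemoteDataSetup {
    public static void main(final String[] args) throws MalformedURLException {
        final NetworkCrawler nc = new NetworkCrawler();
        nc.addInput(new URL("https://example.org/orekit-data/UTC-TAI.history"));
        nc.setTimeout(10000); // connection timeout, in milliseconds
        DataProvidersManager.getInstance().addProvider(nc);
    }
}
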
......@@ -288,12 +288,14 @@ discrete events. Here is a short list of the features offered by the library:</p
</li>
<li>Customizable data loading
<ul>
<li>loading from local disk</li>
<li>loading by exploring a folder hierarchy on local disk</li>
<li>loading from explicit lists of files on local disk</li>
<li>loading from classpath</li>
<li>loading from network (even through internet proxies)</li>
<li>support for zip archives</li>
<li>automatic decompression of gzip compressed (.gz) files upon loading</li>
<li>automatic decompression of Unix compressed (.Z) files upon loading</li>
<li>automatic decompression of Hatanaka compressed files upon loading</li>
<li>plugin mechanism to add filtering such as custom decompression algorithms, deciphering or monitoring</li>
<li>plugin mechanism to delegate loading to a user-defined database or data access library</li>
</ul>
......
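
A minimal sketch illustrating the customizable data loading items above, assuming the usual DataProvidersManager registration API; several providers can be combined, and all paths are placeholders.

import java.io.File;
import org.orekit.data.DataProvidersManager;
import org.orekit.data.DirectoryCrawler;
import org.orekit.data.FilesListCrawler;

// Hypothetical sketch: mix a crawled folder hierarchy with an explicit list of extra files.
public class MixedProvidersSetup {
    public static void main(final String[] args) {
        final DataProvidersManager manager = DataProvidersManager.getInstance();
        manager.addProvider(new DirectoryCrawler(new File("/data/orekit-data")));
        manager.addProvider(new FilesListCrawler(new File("/extra/bulletinb-216.txt")));
    }
}
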
......@@ -261,12 +261,14 @@
* Customizable data loading
* loading from local disk
* loading by exploring a folder hierarchy on local disk
* loading from explicit lists of files on local disk
* loading from classpath
* loading from network (even through internet proxies)
* support for zip archives
* automatic decompression of gzip compressed (.gz) files upon loading
* automatic decompression of Unix compressed (.Z) files upon loading
* automatic decompression of Hatanaka compressed files upon loading
* plugin mechanism to add filtering such as custom decompression algorithms, deciphering or monitoring
* plugin mechanism to delegate loading to a user-defined database or data access library
......
/* Copyright 2002-2019 CS Systèmes d'Information
* Licensed to CS Systèmes d'Information (CS) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* CS licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.orekit.data;
import java.io.IOException;
import java.io.InputStream;
import java.text.ParseException;
import java.util.regex.Pattern;
import org.junit.Assert;
import org.junit.Before;
import org.junit.Test;
import org.orekit.errors.OrekitException;
public abstract class AbstractListCrawlerTest<T> {
protected abstract T input(String resource);
protected abstract AbstractListCrawler<T> build(String... inputs);
@Before
public void setUp() {
// Clear any filters that another test may have left
DataProvidersManager.getInstance().clearFilters();
}
@Test
public void local() {
CountingLoader crawler = new CountingLoader();
AbstractListCrawler<T> nc = build("regular-data/UTC-TAI.history",
"regular-data/de405-ephemerides/unxp0000.405",
"regular-data/de405-ephemerides/unxp0001.405",
"regular-data/de406-ephemerides/unxp0000.406");
Assert.assertEquals(4, nc.getInputs().size());
nc.addInput(input("regular-data/Earth-orientation-parameters/monthly/bulletinb_IAU2000-216.txt"));
Assert.assertEquals(5, nc.getInputs().size());
nc.feed(Pattern.compile(".*"), crawler);
Assert.assertEquals(5, crawler.getCount());
}
@Test
public void compressed() {
CountingLoader crawler = new CountingLoader();
AbstractListCrawler<T> nc = build();
nc.addInput(input("compressed-data/UTC-TAI.history.gz"));
nc.addInput(input("compressed-data/eopc04_08_IAU2000.00.gz"));
nc.addInput(input("compressed-data/eopc04_08_IAU2000.02.gz"));
nc.feed(Pattern.compile("^eopc04.*"), crawler);
Assert.assertEquals(2, crawler.getCount());
}
@Test
public void multiZip() {
CountingLoader crawler = new CountingLoader();
build("zipped-data/multizip.zip").feed(Pattern.compile(".*\\.txt$"), crawler);
Assert.assertEquals(6, crawler.getCount());
}
@Test(expected=OrekitException.class)
public void ioException() {
try {
build("regular-data/UTC-TAI.history").feed(Pattern.compile(".*"), new IOExceptionLoader());
} catch (OrekitException oe) {
// expected behavior
Assert.assertNotNull(oe.getCause());
Assert.assertEquals(IOException.class, oe.getCause().getClass());
Assert.assertEquals("dummy error", oe.getMessage());
throw oe;
}
}
@Test(expected=OrekitException.class)
public void parseException() {
try {
build("regular-data/UTC-TAI.history").feed(Pattern.compile(".*"), new ParseExceptionLoader());
} catch (OrekitException oe) {
// expected behavior
Assert.assertNotNull(oe.getCause());
Assert.assertEquals(ParseException.class, oe.getCause().getClass());
Assert.assertEquals("dummy error", oe.getMessage());
throw oe;
}
}
protected static class CountingLoader implements DataLoader {
private int count = 0;
public boolean stillAcceptsData() {
return true;
}
public void loadData(InputStream input, String name) {
++count;
}
public int getCount() {
return count;
}
}
private static class IOExceptionLoader implements DataLoader {
public boolean stillAcceptsData() {
return true;
}
public void loadData(InputStream input, String name) throws IOException {
if (name.endsWith("UTC-TAI.history")) {
throw new IOException("dummy error");
}
}
}
private static class ParseExceptionLoader implements DataLoader {
public boolean stillAcceptsData() {
return true;
}
public void loadData(InputStream input, String name) throws ParseException {
if (name.endsWith("UTC-TAI.history")) {
throw new ParseException("dummy error", 0);
}
}
}
}
/* Copyright 2002-2019 CS Systèmes d'Information
* Licensed to CS Systèmes d'Information (CS) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* CS licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.orekit.data;
import java.io.File;
import java.io.FileNotFoundException;
import java.net.URISyntaxException;
import java.util.regex.Pattern;
import org.junit.Assert;
import org.junit.Test;
import org.orekit.errors.OrekitException;
public class FilesListCrawlerTest extends AbstractListCrawlerTest<File> {
protected File input(String resource) {
try {
return new File(FilesListCrawlerTest.class.getClassLoader().getResource(resource).toURI().getPath());
} catch (URISyntaxException ue) {
Assert.fail(ue.getLocalizedMessage());
return null;
}
}
protected FilesListCrawler build(String... inputs) {
File[] converted = new File[inputs.length];
for (int i = 0; i < inputs.length; ++i) {
converted[i] = input(inputs[i]);
}
return new FilesListCrawler(converted);
}
@Test
public void noElement() {
try {
File existing = new File(input("regular-data").getPath());
File inexistent = new File(existing.getParent(), "inexistant-directory");
new FilesListCrawler(inexistent).feed(Pattern.compile(".*"), new CountingLoader());
Assert.fail("an exception should have been thrown");
} catch (OrekitException oe) {
Assert.assertTrue(oe.getCause() instanceof FileNotFoundException);
Assert.assertTrue(oe.getLocalizedMessage().contains("inexistant-directory"));
}
}
}
......@@ -18,31 +18,42 @@ package org.orekit.data;
import java.io.File;
import java.io.IOException;
import java.io.InputStream;
import java.io.FileNotFoundException;
import java.net.MalformedURLException;
import java.net.URL;
import java.text.ParseException;
import java.util.regex.Pattern;
import org.junit.Assert;
import org.junit.Test;
import org.junit.Before;
import org.orekit.errors.OrekitException;
public class NetworkCrawlerTest {
public class NetworkCrawlerTest extends AbstractListCrawlerTest<URL> {
@Before
public void setUp() {
// Clear any filters that another test may have left
DataProvidersManager.getInstance().clearFilters();
protected URL input(String resource) {
return NetworkCrawlerTest.class.getClassLoader().getResource(resource);
}
@Test(expected=OrekitException.class)
protected NetworkCrawler build(String... inputs) {
URL[] converted = new URL[inputs.length];
for (int i = 0; i < inputs.length; ++i) {
converted[i] = input(inputs[i]);
}
final NetworkCrawler nc = new NetworkCrawler(converted);
nc.setTimeout(20);
return nc;
}
@Test
public void noElement() throws MalformedURLException {
File existing = new File(url("regular-data").getPath());
File inexistent = new File(existing.getParent(), "inexistant-directory");
new NetworkCrawler(inexistent.toURI().toURL()).feed(Pattern.compile(".*"), new CountingLoader());
try {
File existing = new File(input("regular-data").getPath());
File inexistent = new File(existing.getParent(), "inexistant-directory");
new NetworkCrawler(inexistent.toURI().toURL()).feed(Pattern.compile(".*"), new CountingLoader());
Assert.fail("an exception should have been thrown");
} catch (OrekitException oe) {
Assert.assertTrue(oe.getCause() instanceof FileNotFoundException);
Assert.assertTrue(oe.getLocalizedMessage().contains("inexistant-directory"));
}
}
// WARNING!
......@@ -66,103 +77,4 @@ public class NetworkCrawlerTest {
//
// }
@Test
public void local() {
CountingLoader crawler = new CountingLoader();
NetworkCrawler nc = new NetworkCrawler(url("regular-data/UTC-TAI.history"),
url("regular-data/de405-ephemerides/unxp0000.405"),
url("regular-data/de405-ephemerides/unxp0001.405"),
url("regular-data/de406-ephemerides/unxp0000.406"));
Assert.assertEquals(4, nc.getInputs().size());
nc.addInput(url("regular-data/Earth-orientation-parameters/monthly/bulletinb_IAU2000-216.txt"));
nc.addInput(url("no-data"));
Assert.assertEquals(6, nc.getInputs().size());
nc.setTimeout(20);
nc.feed(Pattern.compile(".*"), crawler);
Assert.assertEquals(6, crawler.getCount());
}
@Test
public void compressed() {
CountingLoader crawler = new CountingLoader();
NetworkCrawler nc = new NetworkCrawler();
nc.addInput(url("compressed-data/UTC-TAI.history.gz"));
nc.addInput(url("compressed-data/eopc04_08_IAU2000.00.gz"));
nc.addInput(url("compressed-data/eopc04_08_IAU2000.02.gz"));
nc.feed(Pattern.compile("^eopc04.*"), crawler);
Assert.assertEquals(2, crawler.getCount());
}
@Test
public void multiZip() {
CountingLoader crawler = new CountingLoader();
new NetworkCrawler(url("zipped-data/multizip.zip")).feed(Pattern.compile(".*\\.txt$"), crawler);
Assert.assertEquals(6, crawler.getCount());
}
@Test(expected=OrekitException.class)
public void ioException() {
try {
new NetworkCrawler(url("regular-data/UTC-TAI.history")).feed(Pattern.compile(".*"), new IOExceptionLoader());
} catch (OrekitException oe) {
// expected behavior
Assert.assertNotNull(oe.getCause());
Assert.assertEquals(IOException.class, oe.getCause().getClass());
Assert.assertEquals("dummy error", oe.getMessage());
throw oe;
}
}
@Test(expected=OrekitException.class)
public void parseException() {
try {
new NetworkCrawler(url("regular-data/UTC-TAI.history")).feed(Pattern.compile(".*"), new ParseExceptionLoader());
} catch (OrekitException oe) {
// expected behavior
Assert.assertNotNull(oe.getCause());
Assert.assertEquals(ParseException.class, oe.getCause().getClass());
Assert.assertEquals("dummy error", oe.getMessage());
throw oe;
}
}
private static class CountingLoader implements DataLoader {
private int count = 0;
public boolean stillAcceptsData() {
return true;
}
public void loadData(InputStream input, String name) {
++count;
}
public int getCount() {
return count;
}
}
private static class IOExceptionLoader implements DataLoader {
public boolean stillAcceptsData() {
return true;
}
public void loadData(InputStream input, String name) throws IOException {
if (name.endsWith("UTC-TAI.history")) {
throw new IOException("dummy error");
}
}
}
private static class ParseExceptionLoader implements DataLoader {
public boolean stillAcceptsData() {
return true;
}
public void loadData(InputStream input, String name) throws ParseException {
if (name.endsWith("UTC-TAI.history")) {
throw new ParseException("dummy error", 0);
}
}
}
private URL url(String resource) {
return DirectoryCrawlerTest.class.getClassLoader().getResource(resource);
}
}