How to load a gzipped RDF file into an RDF4J Repository

Posted 2020-04-23 03:35

Question:

I want to load a gzipped RDF file into an org.eclipse.rdf4j.repository.Repository. During the upload, status messages must be logged to the console. My RDF file is ~1 GB uncompressed (~50 MB compressed).

Answer 1:

An RDF4J repository already processes compressed (zip/gzip) files correctly out of the box, so you can simply do this:

RepositoryConnection conn = ... ; // your store connection
conn.add(new File("file.zip"), null, RDFFormat.NTRIPLES);

If you want to include progress reporting, a different (somewhat simpler) approach is to use the org.eclipse.rdf4j.repository.util.RDFLoader class in combination with an RDFInserter:

RepositoryConnection conn = ... ; // your store connection
RDFInserter inserter = new RDFInserter(conn);
RDFLoader loader = new RDFLoader(conn.getParserConfig(), conn.getValueFactory());

loader.load(new File("file.zip"), null, RDFFormat.NTRIPLES, inserter);

The RDFLoader takes care of properly uncompressing the (zip or gzip) file.

To get intermediate reporting you can wrap your RDFInserter in your own custom AbstractRDFHandler that does the counting and reporting before passing each statement on to the wrapped inserter.
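For instance, a minimal sketch of such a wrapper (untested; the handler and variable names are just illustrative), reusing the RDFLoader setup from above:

RepositoryConnection conn = ... ; // your store connection
RDFInserter inserter = new RDFInserter(conn);

// counting wrapper: report progress, then delegate to the wrapped inserter
AbstractRDFHandler countingInserter = new AbstractRDFHandler() {
    private long count = 0;

    @Override
    public void handleStatement(Statement st) {
        count++;
        if (count % 100000 == 0) {
            System.out.println("Added " + count + " statements");
        }
        inserter.handleStatement(st); // pass on to the wrapped inserter
    }
    // for completeness you may also want to delegate startRDF(), endRDF()
    // and handleNamespace() to the inserter
};

RDFLoader loader = new RDFLoader(conn.getParserConfig(), conn.getValueFactory());
loader.load(new File("file.zip"), null, RDFFormat.NTRIPLES, countingInserter);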



Answer 2:

Variant 1

The following sample loads an InputStream of gzipped data into an in-memory RDF repository; the compressed format is supported directly by RDF4J. Every 100,000th statement is printed to stdout via a RepositoryConnectionListenerAdapter.

import java.io.InputStream;

import org.eclipse.rdf4j.model.IRI;
import org.eclipse.rdf4j.model.Resource;
import org.eclipse.rdf4j.model.Value;
import org.eclipse.rdf4j.repository.Repository;
import org.eclipse.rdf4j.repository.RepositoryConnection;
import org.eclipse.rdf4j.repository.event.base.NotifyingRepositoryConnectionWrapper;
import org.eclipse.rdf4j.repository.event.base.RepositoryConnectionListenerAdapter;
import org.eclipse.rdf4j.repository.sail.SailRepository;
import org.eclipse.rdf4j.rio.RDFFormat;
import org.eclipse.rdf4j.sail.memory.MemoryStore;

public class MyTripleStore {

    Repository repo;

    /**
     * Creates an in-memory triple store.
     */
    public MyTripleStore() {
        repo = new SailRepository(new MemoryStore());
        repo.initialize();
    }

    /**
     * @param in gzip compressed data on an inputstream
     * @param format the format of the streamed data
     */
    public void loadZippedFile(InputStream in, RDFFormat format) {
        System.out.println("Load zip file of format " + format);
        try (NotifyingRepositoryConnectionWrapper con =
                new NotifyingRepositoryConnectionWrapper(repo, repo.getConnection())) {
            RepositoryConnectionListenerAdapter myListener =
                    new RepositoryConnectionListenerAdapter() {
                        private long count = 0;
                        @Override
                        public void add(RepositoryConnection arg0, Resource arg1, IRI arg2, 
                                         Value arg3, Resource... arg4) {
                            count++;
                            if (count % 100000 == 0)
                                System.out.println("Add statement number " + count + "\n"
                                        + arg1 + " " + arg2 + " " + arg3);
                        }
                    };
            con.addRepositoryConnectionListener(myListener);
            con.add(in, "", format);
        } catch (Exception e) {
            throw new RuntimeException(e);
        }
    }
}

Variant 2

This variant implements an AbstractRDFHandler to provide the reporting.

import java.io.InputStream;

import org.eclipse.rdf4j.model.Statement;
import org.eclipse.rdf4j.repository.Repository;
import org.eclipse.rdf4j.repository.RepositoryConnection;
import org.eclipse.rdf4j.repository.sail.SailRepository;
import org.eclipse.rdf4j.repository.util.RDFInserter;
import org.eclipse.rdf4j.repository.util.RDFLoader;
import org.eclipse.rdf4j.rio.RDFFormat;
import org.eclipse.rdf4j.rio.helpers.AbstractRDFHandler;
import org.eclipse.rdf4j.sail.memory.MemoryStore;

public class MyTripleStore {
    Repository repo;

    /**
     * Creates an in-memory triple store.
     */
    public MyTripleStore() {
        repo = new SailRepository(new MemoryStore());
        repo.initialize();
    }

    /**
     * @param in gzip compressed data on an inputstream
     * @param format the format of the streamed data
     */
    public void loadZippedFile(InputStream in, RDFFormat format) {
        try (RepositoryConnection con = repo.getConnection()) {
            MyRdfInserter inserter = new MyRdfInserter(con);
            RDFLoader loader =
                    new RDFLoader(con.getParserConfig(), con.getValueFactory());
            loader.load(in, "", format, inserter);
        } catch (Exception e) {
            throw new RuntimeException(e);
        }
    }

    class MyRdfInserter extends AbstractRDFHandler {
        RDFInserter rdfInserter;
        int count = 0;

        public MyRdfInserter(RepositoryConnection con) {
            rdfInserter = new RDFInserter(con);
        }

        @Override
        public void handleStatement(Statement st) {
            count++;
            if (count % 100000 == 0)
                System.out.println("Add statement number " + count + "\n"
                        + st.getSubject().stringValue() + " "
                        + st.getPredicate().stringValue() + " "
                        + st.getObject().stringValue());
            rdfInserter.handleStatement(st);
        }
    }
}

Here is how to call the code:

MyTripleStore ts = new MyTripleStore();
ts.loadZippedFile(new FileInputStream("your-ntriples-zipped.gz"),
            RDFFormat.NTRIPLES);