/**
  * Creates a new triple from the subject, predicate and object just mapped, and adds it to the
  * Datalift repository.
  *
  * @param s the subject of the triple
  * @param p the predicate of the triple
  * @param o the object of the triple
  * @param ctx optional context (named graph) URIs; when omitted, the triple is added to the
  *            default target graph
  */
 @Override
 public void add(Resource s, URI p, Value o, Resource... ctx) {
    try {
      // Open a connection lazily: it stays open across calls so that
      // additions can be committed batch after batch.
      if (transactionCnx == null) {
        transactionCnx = internalRepository.newConnection();
        transactionCnx.setAutoCommit(false);
      }
      ValueFactory myFactory = transactionCnx.getValueFactory();
      // Build the triple.
      Statement st = myFactory.createStatement(s, p, o);
      if (ctx.length == 0) {
        // No context given: add the triple to the default target graph.
        transactionCnx.add(st, context);
      } else {
        transactionCnx.add(st, ctx);
      }
      statementCount++;
      if (statementCount % batchSize == 0) {
        // Enough triples have accumulated: commit the current batch.
        transactionCnx.commit();
      }
    } catch (RepositoryException e) {
      // Abort the pending batch and release the connection on failure.
      // Closing the connection in a finally block here would break
      // batching, as it would run after every single add() call.
      if (transactionCnx != null) {
        try {
          transactionCnx.rollback();
          transactionCnx.close();
        } catch (RepositoryException e2) {
          /* Ignore */
        } finally {
          transactionCnx = null;
        }
      }
      throw new TechnicalException("triples.saving.failed", e);
    }
 }
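
  /*
   * With batched commits, up to (batchSize - 1) triples can remain pending
   * after the last call to add(). A minimal sketch of a companion method
   * that commits the remainder and releases the connection is given below;
   * the name flush() is an assumption and not part of the original API.
   */
  public void flush() {
    if (transactionCnx != null) {
      try {
        // Commit whatever is left of the last, possibly partial, batch.
        transactionCnx.commit();
      } catch (RepositoryException e) {
        throw new TechnicalException("triples.saving.failed", e);
      } finally {
        try {
          transactionCnx.close();
        } catch (RepositoryException e) {
          /* Ignore */
        }
        transactionCnx = null;
      }
    }
  }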
  /**
   * Creates a new dataset instance with a custom batch size.
   *
   * @param repository the Datalift RDF store where the mapped triples are saved
   * @param targetUri the base URI of the resulting mapped graph
   * @param batchSize the number of triples to add to the store before committing a transaction
   */
  public StoreMappedDataSet(Repository repository, String targetUri, int batchSize) {
    this.internalRepository = repository;
    // Use a short-lived connection to build the default context URI,
    // closing it afterwards so it does not leak.
    RepositoryConnection conn = internalRepository.newConnection();
    try {
      this.context = conn.getValueFactory().createURI(targetUri);
    } finally {
      try { conn.close(); } catch (RepositoryException e) { /* Ignore */ }
    }
    this.batchSize = batchSize;
  }
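
  /*
   * Usage sketch (hypothetical identifiers: repository and mappedStatements
   * are assumed to come from the surrounding Datalift module):
   *
   *   StoreMappedDataSet dataset =
   *       new StoreMappedDataSet(repository, "http://example.org/mapped", 1000);
   *   for (Statement st : mappedStatements) {
   *     dataset.add(st.getSubject(), st.getPredicate(), st.getObject());
   *   }
   *   dataset.flush();   // commit the final partial batch
   */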