Example #1
0
  /**
   * Verifies nested jOOQ transactions: the failing inner transaction rolls back
   * only to its savepoint, while the exception thrown at the end of the outer
   * transaction rolls back everything.
   */
  @Test
  public void testjOOQTransactionsNested() {
    AtomicBoolean rollback1 = new AtomicBoolean(false);
    AtomicBoolean rollback2 = new AtomicBoolean(false);

    try {

      // If using Spring transactions, we don't need the c1 reference
      dsl.transaction(
          c1 -> {

            // The first insertion will work
            dsl.insertInto(BOOK)
                .set(BOOK.ID, 5)
                .set(BOOK.AUTHOR_ID, 1)
                .set(BOOK.TITLE, "Book 5")
                .execute();

            assertEquals(5, dsl.fetchCount(BOOK));

            try {

              // Nest transactions using Spring. This should create a savepoint, right here
              // If using Spring transactions, we don't need the c2 reference
              dsl.transaction(
                  c2 -> {

                    // The second insertion shouldn't work
                    for (int i = 0; i < 2; i++)
                      dsl.insertInto(BOOK)
                          .set(BOOK.ID, 6)
                          .set(BOOK.AUTHOR_ID, 1)
                          .set(BOOK.TITLE, "Book 6")
                          .execute();

                    Assert.fail();
                  });
            } catch (DataAccessException e) {
              // The nested transaction rolled back to the savepoint.
              rollback1.set(true);
            }

            // We should've rolled back to the savepoint
            assertEquals(5, dsl.fetchCount(BOOK));

            throw new org.jooq.exception.DataAccessException("Rollback");
          });
    }

    // Upon the constraint violation, the transaction must already have been rolled back
    catch (org.jooq.exception.DataAccessException e) {
      assertEquals("Rollback", e.getMessage());
      rollback2.set(true);
    }

    assertEquals(4, dsl.fetchCount(BOOK));
    // FIX: the original asserted rollback2 twice and never verified rollback1,
    // so a missing savepoint rollback would have gone undetected.
    assertTrue(rollback1.get());
    assertTrue(rollback2.get());
  }
Example #2
0
  /**
   * Demonstrates explicit (programmatic) Spring transaction management around
   * jOOQ statements: the constraint violation is caught and the transaction is
   * rolled back by hand.
   */
  @Test
  public void testExplicitTransactions() {
    boolean rollback = false;

    TransactionStatus tx = txMgr.getTransaction(new DefaultTransactionDefinition());
    try {

      // This is a "bug". The same book is created twice, resulting in a
      // constraint violation exception
      for (int i = 0; i < 2; i++)
        dsl.insertInto(BOOK)
            .set(BOOK.ID, 5)
            .set(BOOK.AUTHOR_ID, 1)
            .set(BOOK.TITLE, "Book 5")
            .execute();

      Assert.fail();
    }

    // Upon the constraint violation, we explicitly roll back the transaction.
    catch (DataAccessException e) {
      txMgr.rollback(tx);
      rollback = true;
    }

    // FIX: safety net — if the expected exception never surfaced (e.g.
    // Assert.fail() threw AssertionError), the transaction previously leaked.
    finally {
      if (!tx.isCompleted()) {
        txMgr.rollback(tx);
      }
    }

    assertEquals(4, dsl.fetchCount(BOOK));
    assertTrue(rollback);
  }
 /**
  * Subscribes a user to a news source by inserting a row into SUBSCRIPTIONS.
  *
  * <p>Uses ON DUPLICATE KEY IGNORE, so subscribing an already-subscribed user
  * is a silent no-op rather than an error.
  *
  * @param userId id of the subscribing user
  * @param newsSourceId id of the news source being subscribed to
  */
 @Override
 public void subscribe(int userId, int newsSourceId) {
   // NOTE: values(...) is (newsSourceId, userId) — it must stay in the same
   // order as the columns declared in insertInto(...).
   dsl.insertInto(SUBSCRIPTIONS, SUBSCRIPTIONS.NEWS_SOURCE_ID, SUBSCRIPTIONS.USER_ID)
       .values(newsSourceId, userId)
       .onDuplicateKeyIgnore()
       .execute();
 }
Example #4
0
  /**
   * Shows that dsl.transaction(...) rolls back automatically when the
   * transactional lambda throws: the duplicate insert below must leave the
   * BOOK table untouched (still 4 rows).
   */
  @Test
  public void testjOOQTransactionsSimple() {
    boolean rolledBack = false;

    try {
      dsl.transaction(
          c -> {

            // This is a "bug". The same book is created twice, resulting in a
            // constraint violation exception
            for (int attempt = 0; attempt < 2; attempt++) {
              dsl.insertInto(BOOK)
                  .set(BOOK.ID, 5)
                  .set(BOOK.AUTHOR_ID, 1)
                  .set(BOOK.TITLE, "Book 5")
                  .execute();
            }

            Assert.fail();
          });
    } catch (DataAccessException e) {
      // By the time we get here, jOOQ must already have rolled back.
      rolledBack = true;
    }

    assertEquals(4, dsl.fetchCount(BOOK));
    assertTrue(rolledBack);
  }
Example #5
0
  /**
   * CRUD round-trip for PostgreSQL enum arrays: inserts a NULL array, an empty
   * array, a {null}-element array and a single-element array, then reads all
   * four back in ID order and checks each shape.
   */
  private void testPostgresEnumArrayCRUD0(DSLContext create) throws Exception {
    jOOQAbstractTest.reset = false;

    TArrays t = T_ARRAYS;

    int inserted =
        create
            .insertInto(t, t.ID, t.ENUM_ARRAY)
            .values(11, null)
            .values(12, new UCountry[0])
            .values(13, new UCountry[] {null})
            .values(14, new UCountry[] {UCountry.Brazil})
            .execute();
    assertEquals(4, inserted);

    List<UCountry[]> countries =
        create.select(t.ENUM_ARRAY).from(t).where(t.ID.gt(10)).orderBy(t.ID).fetch(t.ENUM_ARRAY);

    // Row 11: a NULL array column comes back as a null reference.
    assertNull(countries.get(0));
    // Row 12: empty array.
    assertEquals(0, countries.get(1).length);
    // Row 13: one element, which is itself null.
    assertEquals(1, countries.get(2).length);
    assertNull(countries.get(2)[0]);
    // Row 14: one real element.
    assertEquals(1, countries.get(3).length);
    assertEquals(UCountry.Brazil, countries.get(3)[0]);
  }
  /**
   * Batch-inserts one CHARGE_BOX row per id in {@code chargeBoxIdList}.
   *
   * @param chargeBoxIdList charge box ids to insert; an empty list is a no-op
   */
  @Override
  public void addChargePoint(List<String> chargeBoxIdList) {
    // FIX: without this guard, an empty list still executed the template
    // statement below, inserting a bogus row with an empty charge box id.
    if (chargeBoxIdList.isEmpty()) {
      return;
    }

    // The "" is only a placeholder bind value for the batch template.
    BatchBindStep batch = ctx.batch(ctx.insertInto(CHARGE_BOX).set(CHARGE_BOX.CHARGE_BOX_ID, ""));

    for (String s : chargeBoxIdList) {
      // FIX: use the returned step, per the BatchBindStep fluent contract,
      // instead of relying on in-place mutation.
      batch = batch.bind(s);
    }

    batch.execute();
  }
  /** Inserts two authors with a single multi-row VALUES statement. */
  @Test
  public void testInsertByMultipleValues() {
    DSLContext ctx = using(conn, SQLDialect.MYSQL);

    ctx.insertInto(AUTHOR, AUTHOR.FIRST_NAME, AUTHOR.LAST_NAME)
        .values("li", "san")
        .values("wang", "san")
        .execute();
  }
 /**
  * Stores a single (row, column, value) cell in the backing table.
  *
  * @param value the numeric cell value to store
  * @param row the row label of the cell
  * @param column the column label of the cell
  * @return this builder, for call chaining
  * @throws ValueStoreException declared by the ValueStoreBuilder contract
  * @see edu.dfci.cccb.mev.dataset.domain.contract.ValueStoreBuilder#add(double,
  *      java.lang.String, java.lang.String)
  */
 @Override
 public ValueStoreBuilder add(double value, String row, String column) throws ValueStoreException {
   // Note: this.row / this.column / this.value are the table's Field references,
   // while the identically-named parameters carry the data being inserted.
   context
       .insertInto(table)
       .set(this.row, row)
       .set(this.column, column)
       .set(this.value, value)
       .execute();
   return this;
 }
 /**
  * Upsert example: inserts author #3 ("Koontz"); if an author with ID 3 already
  * exists, the ON DUPLICATE KEY clause updates the last name instead.
  */
 @Test
 public void testSaveOrUpdateWhenDuplicate() {
   // Add a new author called "Koontz" with ID 3.
   // If that ID is already present, update the author's name
   create
       .insertInto(AUTHOR, AUTHOR.ID, AUTHOR.LAST_NAME)
       .values(3, "Koontz")
       .onDuplicateKeyUpdate()
       .set(AUTHOR.LAST_NAME, "Koontz")
       .execute();
 }
 /**
  * Inserts a new charge box row built from the form and returns the generated
  * primary key.
  *
  * @param ctx the jOOQ context to execute with (may be transactional)
  * @param form source of the charge box attributes
  * @param addressPk FK of the related address row (nullable column)
  * @return the generated CHARGE_BOX_PK of the new row
  */
 private int addChargePointInternal(DSLContext ctx, ChargePointForm form, Integer addressPk) {
   // NOTE(review): fetchOne() can return null if the dialect/driver does not
   // return the generated key — that would surface here as an NPE; confirm.
   return ctx.insertInto(CHARGE_BOX)
       .set(CHARGE_BOX.CHARGE_BOX_ID, form.getChargeBoxId())
       .set(CHARGE_BOX.DESCRIPTION, form.getDescription())
       .set(CHARGE_BOX.LOCATION_LATITUDE, form.getLocationLatitude())
       .set(CHARGE_BOX.LOCATION_LONGITUDE, form.getLocationLongitude())
       .set(CHARGE_BOX.NOTE, form.getNote())
       .set(CHARGE_BOX.ADDRESS_PK, addressPk)
       .returning(CHARGE_BOX.CHARGE_BOX_PK)
       .fetchOne()
       .getChargeBoxPk();
 }
  /** Inserts two authors using the SET syntax, separated by newRecord(). */
  @Test
  public void testInsertBySet() {
    DSLContext ctx = using(conn, SQLDialect.MYSQL);

    ctx.insertInto(AUTHOR)
        .set(AUTHOR.FIRST_NAME, "Hermann")
        .set(AUTHOR.LAST_NAME, "Hesse")
        // newRecord() starts the second row of the same INSERT.
        .newRecord()
        .set(AUTHOR.FIRST_NAME, "Alfred")
        .set(AUTHOR.LAST_NAME, "Döblin")
        .execute();
  }
  /** Demonstrates retrieving generated keys from single- and multi-row inserts. */
  @Test
  public void testInsertAndReturn() {
    // Single-row insert: fetch the generated ID of the new author.
    Record inserted =
        create
            .insertInto(AUTHOR, AUTHOR.FIRST_NAME, AUTHOR.LAST_NAME)
            .values("Charlotte", "Roche")
            .returning(AUTHOR.ID)
            .fetchOne();
    System.out.println(inserted.getValue(AUTHOR.ID));

    // For some RDBMS, this also works when inserting several values
    // The following should return a 2x2 table
    Result<?> returned =
        create
            .insertInto(AUTHOR, AUTHOR.FIRST_NAME, AUTHOR.LAST_NAME)
            .values("Johann Wolfgang", "von Goethe")
            .values("Friedrich", "Schiller")
            // You can request any field. Also trigger-generated values
            .returning(AUTHOR.ID, AUTHOR.LAST_NAME)
            .fetch();
    System.out.println(returned.getValue(0, AUTHOR.ID));
    System.out.println(returned.getValue(1, AUTHOR.ID));
  }
 /**
  * Updates a book's own fields and reconciles its author links: authors newly
  * listed on the model are inserted into AUTHOR_BOOK, authors no longer listed
  * are deleted from it.
  *
  * @param bookModel carries the book's new field values and the full list of
  *     author ids (as strings) that should remain linked
  */
 @Override
 public void updateBook(BookModel bookModel) {
   create
       .update(BOOK)
       .set(BOOK.TITLE, bookModel.getTitle())
       .set(BOOK.DATE_PUBLISH, bookModel.getDatePublish())
       .set(BOOK.SHORT_DESCRIPTION, bookModel.getShortDescription())
       .where(BOOK.ID_BOOK.equal(bookModel.getIdBook()))
       .execute();

   // Author ids currently linked to this book in the database.
   List<Integer> existsAuthors =
       create
           .select(AUTHOR_BOOK.ID_AUTHOR)
           .from(AUTHOR_BOOK)
           .where(AUTHOR_BOOK.ID_BOOK.equal(bookModel.getIdBook()))
           .fetchInto(Integer.class);
   logger.debug("Existing authors: " + existsAuthors);

   // Split the model's author list into "all" and "not yet linked".
   List<Integer> newAuthors = new ArrayList<>();
   List<Integer> authors = new ArrayList<>();
   for (String idAuthor : bookModel.getAuthors()) {
     int id = Integer.parseInt(idAuthor); // FIX: parseInt avoids needless boxing
     authors.add(id);
     if (!existsAuthors.contains(id)) {
       newAuthors.add(id);
     }
   }

   // Whatever remains is linked in the DB but absent from the model: to delete.
   existsAuthors.removeAll(authors);

   logger.debug("New authors: " + newAuthors);
   if (!newAuthors.isEmpty()) {
     // FIX: chain values(...) on the returned step instead of tracking a
     // separate, possibly-null variable that was executed outside the loop.
     InsertValuesStep2<AuthorBookRecord, Integer, Integer> insertStep =
         create.insertInto(AUTHOR_BOOK, AUTHOR_BOOK.ID_AUTHOR, AUTHOR_BOOK.ID_BOOK);
     for (Integer idAuthor : newAuthors) {
       insertStep = insertStep.values(idAuthor, bookModel.getIdBook());
     }
     insertStep.execute();
   }

   logger.debug("Old authors: " + existsAuthors);
   if (!existsAuthors.isEmpty()) {
     create.delete(AUTHOR_BOOK).where(AUTHOR_BOOK.ID_AUTHOR.in(existsAuthors)).execute();
   }
 }
  /**
   * Blocking version of {@link AsyncSpanConsumer#accept}: persists spans and
   * their (binary) annotations, collecting all statements first and executing
   * them as a single JDBC batch at the end.
   */
  @Override
  public void accept(List<Span> spans) {
    if (spans.isEmpty()) return;
    try (Connection conn = datasource.getConnection()) {
      DSLContext create = context.get(conn);

      // All inserts for this call are accumulated here and batched at the end.
      List<Query> inserts = new ArrayList<>();

      for (Span span : spans) {
        Long overridingTimestamp = authoritativeTimestamp(span);
        // Prefer the authoritative timestamp; otherwise fall back to a guess.
        Long timestamp = overridingTimestamp != null ? overridingTimestamp : guessTimestamp(span);

        // Fields worth overwriting when the span row already exists.
        Map<TableField<Record, ?>, Object> updateFields = new LinkedHashMap<>();
        if (!span.name.equals("") && !span.name.equals("unknown")) {
          updateFields.put(ZIPKIN_SPANS.NAME, span.name);
        }
        // replace any tentative timestamp with the authoritative one.
        if (overridingTimestamp != null) {
          updateFields.put(ZIPKIN_SPANS.START_TS, overridingTimestamp);
        }
        if (span.duration != null) {
          updateFields.put(ZIPKIN_SPANS.DURATION, span.duration);
        }

        InsertSetMoreStep<Record> insertSpan =
            create
                .insertInto(ZIPKIN_SPANS)
                .set(ZIPKIN_SPANS.TRACE_ID, span.traceId)
                .set(ZIPKIN_SPANS.ID, span.id)
                .set(ZIPKIN_SPANS.PARENT_ID, span.parentId)
                .set(ZIPKIN_SPANS.NAME, span.name)
                .set(ZIPKIN_SPANS.DEBUG, span.debug)
                .set(ZIPKIN_SPANS.START_TS, timestamp)
                .set(ZIPKIN_SPANS.DURATION, span.duration);

        // High bits of 128-bit trace ids — only when the schema has the column.
        if (span.traceIdHigh != 0 && schema.hasTraceIdHigh) {
          insertSpan.set(ZIPKIN_SPANS.TRACE_ID_HIGH, span.traceIdHigh);
        }

        // With nothing trustworthy to update, ignore duplicates entirely.
        inserts.add(
            updateFields.isEmpty()
                ? insertSpan.onDuplicateKeyIgnore()
                : insertSpan.onDuplicateKeyUpdate().set(updateFields));

        // One annotation row per annotation; duplicates carry the same data.
        for (Annotation annotation : span.annotations) {
          InsertSetMoreStep<Record> insert =
              create
                  .insertInto(ZIPKIN_ANNOTATIONS)
                  .set(ZIPKIN_ANNOTATIONS.TRACE_ID, span.traceId)
                  .set(ZIPKIN_ANNOTATIONS.SPAN_ID, span.id)
                  .set(ZIPKIN_ANNOTATIONS.A_KEY, annotation.value)
                  .set(ZIPKIN_ANNOTATIONS.A_TYPE, -1)
                  .set(ZIPKIN_ANNOTATIONS.A_TIMESTAMP, annotation.timestamp);
          if (span.traceIdHigh != 0 && schema.hasTraceIdHigh) {
            insert.set(ZIPKIN_ANNOTATIONS.TRACE_ID_HIGH, span.traceIdHigh);
          }
          // Endpoint columns are optional; IPv6 additionally needs schema support.
          if (annotation.endpoint != null) {
            insert.set(ZIPKIN_ANNOTATIONS.ENDPOINT_SERVICE_NAME, annotation.endpoint.serviceName);
            insert.set(ZIPKIN_ANNOTATIONS.ENDPOINT_IPV4, annotation.endpoint.ipv4);
            if (annotation.endpoint.ipv6 != null && schema.hasIpv6) {
              insert.set(ZIPKIN_ANNOTATIONS.ENDPOINT_IPV6, annotation.endpoint.ipv6);
            }
            insert.set(ZIPKIN_ANNOTATIONS.ENDPOINT_PORT, annotation.endpoint.port);
          }
          inserts.add(insert.onDuplicateKeyIgnore());
        }

        // Binary annotations follow the same pattern, keyed by annotation.key.
        for (BinaryAnnotation annotation : span.binaryAnnotations) {
          InsertSetMoreStep<Record> insert =
              create
                  .insertInto(ZIPKIN_ANNOTATIONS)
                  .set(ZIPKIN_ANNOTATIONS.TRACE_ID, span.traceId)
                  .set(ZIPKIN_ANNOTATIONS.SPAN_ID, span.id)
                  .set(ZIPKIN_ANNOTATIONS.A_KEY, annotation.key)
                  .set(ZIPKIN_ANNOTATIONS.A_VALUE, annotation.value)
                  .set(ZIPKIN_ANNOTATIONS.A_TYPE, annotation.type.value)
                  .set(ZIPKIN_ANNOTATIONS.A_TIMESTAMP, timestamp);
          if (span.traceIdHigh != 0 && schema.hasTraceIdHigh) {
            insert.set(ZIPKIN_ANNOTATIONS.TRACE_ID_HIGH, span.traceIdHigh);
          }
          if (annotation.endpoint != null) {
            insert.set(ZIPKIN_ANNOTATIONS.ENDPOINT_SERVICE_NAME, annotation.endpoint.serviceName);
            insert.set(ZIPKIN_ANNOTATIONS.ENDPOINT_IPV4, annotation.endpoint.ipv4);
            if (annotation.endpoint.ipv6 != null && schema.hasIpv6) {
              insert.set(ZIPKIN_ANNOTATIONS.ENDPOINT_IPV6, annotation.endpoint.ipv6);
            }
            insert.set(ZIPKIN_ANNOTATIONS.ENDPOINT_PORT, annotation.endpoint.port);
          }
          inserts.add(insert.onDuplicateKeyIgnore());
        }
      }
      // Single round-trip for everything collected above.
      create.batch(inserts).execute();
    } catch (SQLException e) {
      throw new RuntimeException(e); // TODO
    }
  }
  /**
   * Parses the KEGG reaction resource and populates the local HSQLDB schema:
   * REACTION and COMPOUND are bulk-inserted first, then the REACTANT/PRODUCT
   * relation tables are filled row by row via accession lookups.
   *
   * @throws IOException if the source cannot be read or a SQL error occurs
   */
  @Override
  public void update() throws IOException {
    ResourceFileLocation location = getLocation("KEGG Reaction");
    HSQLDBLocation connection = connection();
    try {
      Hsqldb.createReactionSchema(connection.getConnection());
      DSLContext create = DSL.using(connection.getConnection(), HSQLDB);

      // Compound accessions already queued for insert (deduplication).
      Set<String> compoundIds = Sets.newHashSetWithExpectedSize(10000);

      InsertValuesStep2<?, String, String> reactionInsert =
          create.insertInto(REACTION, REACTION.ACCESSION, REACTION.EC);
      InsertValuesStep1<?, String> compoundInsert = create.insertInto(COMPOUND, COMPOUND.ACCESSION);

      // Participant triples: [coefficient, compound accession, reaction accession].
      List<String[]> reactants = new ArrayList<String[]>(10000);
      List<String[]> products = new ArrayList<String[]>(10000);

      KEGGReactionParser parser =
          new KEGGReactionParser(
              location.open(), KEGGField.ENTRY, KEGGField.EQUATION, KEGGField.ENZYME);
      Map<KEGGField, StringBuilder> entry;
      while ((entry = parser.readNext()) != null) {

        if (isCancelled()) break;

        String equation = entry.get(KEGGField.EQUATION).toString();
        String ec =
            entry.containsKey(KEGGField.ENZYME)
                ? entry.get(KEGGField.ENZYME).toString().trim()
                : "";
        // Left side <=> right side of the reaction equation.
        String[] sides = equation.split("<=>");

        String[][] left = getParticipants(sides[0]);
        String[][] right = getParticipants(sides[1]);

        Matcher matcher = ACCESSION.matcher(entry.get(KEGGField.ENTRY).toString());

        // Keep only the first EC number when several are listed.
        if (!ec.isEmpty()) ec = ec.split("\\s+")[0].trim();

        if (matcher.find()) {
          String accession = matcher.group(1);
          reactionInsert.values(accession, ec);

          for (String[] participant : left) {
            String cid = participant[1];
            if (compoundIds.add(cid)) compoundInsert.values(cid);
            // Extend the pair to a triple carrying the owning reaction accession.
            participant = Arrays.copyOf(participant, 3);
            participant[2] = accession;
            reactants.add(participant);
          }
          for (String[] participant : right) {
            String cid = participant[1];
            if (compoundIds.add(cid)) compoundInsert.values(cid);
            participant = Arrays.copyOf(participant, 3);
            participant[2] = accession;
            products.add(participant);
          }
        }
      }

      // do the inserts
      fireProgressUpdate("inserting reactions and compounds");
      reactionInsert.execute();
      compoundInsert.execute();

      fireProgressUpdate("inserting reaction relations");

      for (int i = 0, end = reactants.size() - 1; i <= end; i++) {

        String[] participant = reactants.get(i);
        double coef = Double.parseDouble(participant[0]);
        String cid = participant[1];
        String acc = participant[2];
        // FK columns are resolved with correlated SELECTs on the accession.
        create
            .insertInto(REACTANT)
            .set(REACTANT.COEFFICIENT, coef)
            .set(
                REACTANT.COMPOUND_ID,
                create.select(COMPOUND.ID).from(COMPOUND).where(COMPOUND.ACCESSION.eq(cid)))
            .set(
                REACTANT.REACTION_ID,
                create.select(REACTION.ID).from(REACTION).where(REACTION.ACCESSION.eq(acc)))
            .execute();
      }

      for (int i = 0, end = products.size() - 1; i <= end; i++) {

        String[] participant = products.get(i);
        double coef = Double.parseDouble(participant[0]);
        String cid = participant[1];
        String acc = participant[2];
        create
            .insertInto(PRODUCT)
            .set(PRODUCT.COEFFICIENT, coef)
            .set(
                PRODUCT.COMPOUND_ID,
                create.select(COMPOUND.ID).from(COMPOUND).where(COMPOUND.ACCESSION.eq(cid)))
            .set(
                PRODUCT.REACTION_ID,
                create.select(REACTION.ID).from(REACTION).where(REACTION.ACCESSION.eq(acc)))
            .execute();
      }

    } catch (SQLException e) {
      throw new IOException(e);
    } finally {
      location.close();
      try {
        connection.commit();
      } catch (SQLException e) {
        System.err.println(e.getMessage());
      } finally {
        try {
          connection.close();
        } catch (SQLException e) {
          // FIX: was an empty catch silently swallowing the failure;
          // report it the same way as a commit failure above.
          System.err.println(e.getMessage());
        }
      }
    }
  }