@SuppressWarnings("unchecked")
  @Test
  public void testAdditionalParameters() {

    Map<String, Object> requestParameters = new LinkedHashMap<String, Object>();
    requestParameters.put("node", "root");
    requestParameters.put("foo", "foo");
    requestParameters.put("today", ISODateTimeFormat.date().print(new LocalDate()));

    List<Node> nodes =
        (List<Node>)
            ControllerUtil.sendAndReceive(
                mockMvc,
                "remoteProviderTreeLoad",
                "method2",
                new TypeReference<List<Node>>() {
                  /* nothing here */
                },
                requestParameters);

    String appendix = ":foo;" + new LocalDate().toString();
    assertThat(nodes)
        .hasSize(5)
        .containsSequence(
            new Node("n1", "Node 1" + appendix, false),
            new Node("n2", "Node 2" + appendix, false),
            new Node("n3", "Node 3" + appendix, false),
            new Node("n4", "Node 4" + appendix, false),
            new Node("n5", "Node 5" + appendix, false));

    requestParameters = new LinkedHashMap<String, Object>();
    requestParameters.put("node", "root");
    requestParameters.put("today", ISODateTimeFormat.date().print(new LocalDate().plusDays(10)));

    nodes =
        (List<Node>)
            ControllerUtil.sendAndReceive(
                mockMvc,
                "remoteProviderTreeLoad",
                "method2",
                new TypeReference<List<Node>>() {
                  /* nothing here */
                },
                requestParameters);

    appendix = ":defaultValue;" + new LocalDate().plusDays(10).toString();
    assertThat(nodes)
        .hasSize(5)
        .containsSequence(
            new Node("n1", "Node 1" + appendix, false),
            new Node("n2", "Node 2" + appendix, false),
            new Node("n3", "Node 3" + appendix, false),
            new Node("n4", "Node 4" + appendix, false),
            new Node("n5", "Node 5" + appendix, false));
  }
Example #2
/** Gson TypeAdapter for Joda LocalDate type */
class LocalDateTypeAdapter extends TypeAdapter<LocalDate> {

  private final DateTimeFormatter formatter = ISODateTimeFormat.date();

  @Override
  public void write(JsonWriter out, LocalDate date) throws IOException {
    if (date == null) {
      out.nullValue();
    } else {
      out.value(formatter.print(date));
    }
  }

  @Override
  public LocalDate read(JsonReader in) throws IOException {
    switch (in.peek()) {
      case NULL:
        in.nextNull();
        return null;
      default:
        String date = in.nextString();
        return formatter.parseLocalDate(date);
    }
  }
}
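
An adapter like this only takes effect once it is registered with Gson; a minimal usage sketch, assuming the adapter class above is visible (the demo class and sample date are illustrative, not from the original source):

import com.google.gson.Gson;
import com.google.gson.GsonBuilder;
import org.joda.time.LocalDate;

public class LocalDateAdapterDemo {
  public static void main(String[] args) {
    // register the adapter so Gson delegates LocalDate (de)serialization to it
    Gson gson = new GsonBuilder()
        .registerTypeAdapter(LocalDate.class, new LocalDateTypeAdapter())
        .create();

    String json = gson.toJson(new LocalDate(2020, 1, 15)); // the JSON string "2020-01-15"
    LocalDate roundTripped = gson.fromJson(json, LocalDate.class);
    System.out.println(json + " -> " + roundTripped);
  }
}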
Example #3
  /**
   * @param isNullable whether an empty ("nullable") value is added to the result
   * @param earliest lower boundary date (yyyy-MM-dd)
   * @param latest upper boundary date (yyyy-MM-dd)
   * @param onlyBusinessDays restrict the inner boundary dates to business days
   * @return a list of boundary dates
   */
  public List<String> positiveCase(
      boolean isNullable, String earliest, String latest, boolean onlyBusinessDays) {
    List<String> values = new LinkedList<>();

    if (earliest.equalsIgnoreCase(latest)) {
      values.add(earliest);
      if (isNullable) {
        values.add("");
      }
      return values;
    }

    DateTimeFormatter parser = ISODateTimeFormat.date();
    DateTime earlyDate = parser.parseDateTime(earliest);
    DateTime lateDate = parser.parseDateTime(latest);

    String earlyDay = parser.print(earlyDate);
    String nextDay = getNextDay(earlyDate.toString().substring(0, 10), onlyBusinessDays);
    String prevDay = getPreviousDay(lateDate.toString().substring(0, 10), onlyBusinessDays);
    String lateDay = parser.print(lateDate);

    values.add(earlyDay);
    values.add(nextDay);
    values.add(prevDay);
    values.add(lateDay);

    if (isNullable) {
      values.add("");
    }
    return values;
  }
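
The business-day and holiday handling above depends on helpers shown in later examples; a stripped-down sketch of the same boundary-value idea using only Joda-Time (class and method names are illustrative):

import java.util.Arrays;
import java.util.List;
import org.joda.time.LocalDate;
import org.joda.time.format.DateTimeFormatter;
import org.joda.time.format.ISODateTimeFormat;

public final class BoundaryDates {
  /** Both range ends plus the first day inside each end, as yyyy-MM-dd strings. */
  static List<String> boundaries(String earliest, String latest) {
    DateTimeFormatter fmt = ISODateTimeFormat.date();
    LocalDate early = fmt.parseLocalDate(earliest);
    LocalDate late = fmt.parseLocalDate(latest);
    return Arrays.asList(
        fmt.print(early),
        fmt.print(early.plusDays(1)),
        fmt.print(late.minusDays(1)),
        fmt.print(late));
  }
}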
Example #4
public final class DateMidnightSerializer extends JodaSerializerBase<DateMidnight> {
  static final DateTimeFormatter format = ISODateTimeFormat.date();

  public DateMidnightSerializer() {
    super(DateMidnight.class);
  }

  @Override
  public void serialize(DateMidnight dt, JsonGenerator jgen, SerializerProvider provider)
      throws IOException, JsonGenerationException {
    if (provider.isEnabled(SerializationFeature.WRITE_DATES_AS_TIMESTAMPS)) {
      // same as with other date-only values
      jgen.writeStartArray();
      jgen.writeNumber(dt.year().get());
      jgen.writeNumber(dt.monthOfYear().get());
      jgen.writeNumber(dt.dayOfMonth().get());
      jgen.writeEndArray();
    } else {
      jgen.writeString(format.print(dt));
    }
  }

  @Override
  public JsonNode getSchema(SerializerProvider provider, java.lang.reflect.Type typeHint) {
    return createSchemaNode(
        provider.isEnabled(SerializationFeature.WRITE_DATES_AS_TIMESTAMPS) ? "array" : "string",
        true);
  }
}
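
A serializer like this is typically wired up through a Jackson module; a minimal sketch, assuming the class above and jackson-databind are on the classpath:

import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.databind.SerializationFeature;
import com.fasterxml.jackson.databind.module.SimpleModule;
import org.joda.time.DateMidnight;

public class DateMidnightSerializerDemo {
  public static void main(String[] args) throws Exception {
    SimpleModule module = new SimpleModule();
    module.addSerializer(DateMidnight.class, new DateMidnightSerializer());

    ObjectMapper mapper = new ObjectMapper();
    mapper.registerModule(module);
    // with timestamps disabled, the serializer takes the ISO-string branch
    mapper.disable(SerializationFeature.WRITE_DATES_AS_TIMESTAMPS);

    System.out.println(mapper.writeValueAsString(new DateMidnight(2020, 1, 15))); // "2020-01-15"
  }
}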
Example #5
  /**
   * Grabs a random holiday from the equivalence class that falls between the two dates
   *
   * @param earliest the earliest date parameter as defined in the model
   * @param latest the latest date parameter as defined in the model
   * @return a holiday that falls between the dates
   */
  public String getRandomHoliday(String earliest, String latest) {
    String dateString = "";
    DateTimeFormatter parser = ISODateTimeFormat.date();
    DateTime earlyDate = parser.parseDateTime(earliest);
    DateTime lateDate = parser.parseDateTime(latest);
    List<Holiday> holidays = new LinkedList<>();

    int min = Integer.parseInt(earlyDate.toString().substring(0, 4));
    int max = Integer.parseInt(lateDate.toString().substring(0, 4));
    int range = max - min + 1;
    int randomYear = (int) (Math.random() * range) + min;

    for (Holiday s : EquivalenceClassTransformer.HOLIDAYS) {
      holidays.add(s);
    }
    Collections.shuffle(holidays);

    for (Holiday holiday : holidays) {
      dateString = convertToReadableDate(holiday.forYear(randomYear));
      if (toDate(dateString).after(toDate(earliest)) && toDate(dateString).before(toDate(latest))) {
        break;
      }
    }
    return dateString;
  }
Example #6
  @Test
  public void testWithConversion() throws IOException {

    DateTime today = new DateTime();

    Map<String, Object> resultMap =
        (Map<String, Object>)
            ControllerUtil.sendAndReceive(
                controller,
                "remoteProviderSimple",
                "method14",
                a(
                    ISODateTimeFormat.dateTime().print(today),
                    "normalParameter",
                    ISODateTimeFormat.date().print(today),
                    "99.9%"),
                Map.class);

    assertThat(resultMap.get("endDate")).isEqualTo(today.getMillis());
    ObjectMapper mapper = new ObjectMapper();

    List<Object> expectedValue =
        mapper.readValue(mapper.writeValueAsString(today.toLocalDate()), List.class);
    Object actualValue = resultMap.get("jodaLocalDate");

    assertThat((List<Object>) actualValue).isEqualTo(expectedValue);
    assertThat(resultMap.get("percent")).isEqualTo(0.999);
    assertThat(resultMap.get("normalParameter")).isEqualTo("normalParameter");
    assertThat(resultMap.get("remoteAddr")).isEqualTo("127.0.0.1");
  }
Example #7
  /**
   * Takes a date and returns the following day; when onlyBusinessDays is set, weekends and
   * holidays are skipped.
   *
   * @param dateString the date in yyyy-MM-dd format
   * @param onlyBusinessDays only business days
   * @return the next day as a yyyy-MM-dd string
   */
  public String getNextDay(String dateString, boolean onlyBusinessDays) {
    DateTimeFormatter parser = ISODateTimeFormat.date();
    DateTime date = parser.parseDateTime(dateString).plusDays(1);
    Calendar cal = Calendar.getInstance();
    cal.setTime(date.toDate());

    if (onlyBusinessDays) {
      if (cal.get(Calendar.DAY_OF_WEEK) == 1
          || cal.get(Calendar.DAY_OF_WEEK) == 7
          || isHoliday(date.toString().substring(0, 10))) {
        return getNextDay(date.toString().substring(0, 10), true);
      } else {
        return parser.print(date);
      }
    } else {
      return parser.print(date);
    }
  }
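
Note the Calendar round-trip and the recursive substring(0, 10) calls above; without the holiday lookup, the weekend-skipping part can be expressed in Joda-Time alone (an illustrative sketch, not the original helper):

import org.joda.time.DateTimeConstants;
import org.joda.time.LocalDate;

public final class BusinessDays {
  /** Returns the next day, skipping Saturdays and Sundays (holidays not considered). */
  static LocalDate nextBusinessDay(LocalDate date) {
    LocalDate next = date.plusDays(1);
    while (next.getDayOfWeek() == DateTimeConstants.SATURDAY
        || next.getDayOfWeek() == DateTimeConstants.SUNDAY) {
      next = next.plusDays(1);
    }
    return next;
  }
}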
Example #8
  /**
   * Converts a holiday from the EquivalenceClassTransformer representation into a date string
   *
   * @param holiday the holiday to convert
   * @return a date String in the format yyyy-MM-dd
   */
  public String convertToReadableDate(Holiday holiday) {
    DateTimeFormatter parser = ISODateTimeFormat.date();

    if (holiday.isInDateForm()) {
      String month =
          Integer.toString(holiday.getMonth()).length() < 2
              ? "0" + holiday.getMonth()
              : Integer.toString(holiday.getMonth());
      String day =
          Integer.toString(holiday.getDayOfMonth()).length() < 2
              ? "0" + holiday.getDayOfMonth()
              : Integer.toString(holiday.getDayOfMonth());
      return holiday.getYear() + "-" + month + "-" + day;
    } else {
      /*
       * 5 denotes the final occurrence of the day in the month. Need to find actual
       * number of occurrences
       */
      if (holiday.getOccurrence() == 5) {
        holiday.setOccurrence(
            numOccurrences(holiday.getYear(), holiday.getMonth(), holiday.getDayOfWeek()));
      }

      DateTime date =
          parser.parseDateTime(holiday.getYear() + "-" + holiday.getMonth() + "-" + "01");
      Calendar calendar = Calendar.getInstance();
      calendar.setTime(date.toDate());
      int count = 0;

      while (count < holiday.getOccurrence()) {
        if (calendar.get(Calendar.DAY_OF_WEEK) == holiday.getDayOfWeek()) {
          count++;
          if (count == holiday.getOccurrence()) {
            break;
          }
        }
        date = date.plusDays(1);
        calendar.setTime(date.toDate());
      }
      return date.toString().substring(0, 10);
    }
  }
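
The manual zero-padding in the date branch can be written more compactly with String.format; a small equivalent sketch (class name is illustrative):

public final class DatePad {
  /** Builds a yyyy-MM-dd string with zero-padded month and day. */
  static String isoDate(int year, int month, int day) {
    return String.format("%04d-%02d-%02d", year, month, day);
  }
}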
Example #9
  /**
   * @param isNullable whether the field may be empty; when false, an empty string is added as a
   *     negative case
   * @param earliest lower boundary date (yyyy-MM-dd)
   * @param latest upper boundary date (yyyy-MM-dd)
   * @return a list of negative-case dates (out of range, wrongly formatted, or a holiday)
   */
  public List<String> negativeCase(boolean isNullable, String earliest, String latest) {
    List<String> values = new LinkedList<>();

    DateTimeFormatter parser = ISODateTimeFormat.date();
    DateTime earlyDate = parser.parseDateTime(earliest);
    DateTime lateDate = parser.parseDateTime(latest);

    String prevDay = parser.print(earlyDate.minusDays(1));
    String nextDay = parser.print(lateDate.plusDays(1));

    values.add(prevDay);
    values.add(nextDay);
    values.add(
        nextDay.substring(5, 7) + "-" + nextDay.substring(8, 10) + "-" + nextDay.substring(0, 4));
    values.add(getRandomHoliday(earliest, latest));

    if (!isNullable) {
      values.add("");
    }
    return values;
  }
Example #10
  /**
   * Given a year, month, and day of the week, finds the number of occurrences of that day in the
   * month
   *
   * @param year the year
   * @param month the month
   * @param day the day of the week (Calendar convention: 1 = Sunday .. 7 = Saturday)
   * @return the number of occurrences of the day in the month
   */
  public int numOccurrences(int year, int month, int day) {
    DateTimeFormatter parser = ISODateTimeFormat.date();
    DateTime date = parser.parseDateTime(year + "-" + month + "-" + "01");
    Calendar cal = Calendar.getInstance();
    cal.setTime(date.toDate());
    GregorianChronology calendar = GregorianChronology.getInstance();
    DateTimeField field = calendar.dayOfMonth();

    int days = 0;
    int count = 0;
    int num = field.getMaximumValue(new LocalDate(year, month, day, calendar));
    while (days < num) {
      if (cal.get(Calendar.DAY_OF_WEEK) == day) {
        count++;
      }
      date = date.plusDays(1);
      cal.setTime(date.toDate());

      days++;
    }
    return count;
  }
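
The same count can be computed in Joda-Time alone, avoiding the Calendar round-trip; an illustrative sketch (note it uses the ISO day-of-week convention that Joda's getDayOfWeek() returns, not Calendar's):

import org.joda.time.LocalDate;

public final class DayCounter {
  /** Counts occurrences of an ISO day of the week (1 = Monday .. 7 = Sunday) in a month. */
  static int occurrencesInMonth(int year, int month, int isoDayOfWeek) {
    LocalDate day = new LocalDate(year, month, 1);
    int daysInMonth = day.dayOfMonth().getMaximumValue();
    int count = 0;
    for (int i = 0; i < daysInMonth; i++) {
      if (day.getDayOfWeek() == isoDayOfWeek) {
        count++;
      }
      day = day.plusDays(1);
    }
    return count;
  }
}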
Example #11
public class CassandraRecordSink implements RecordSink {
  private static final DateTimeFormatter DATE_FORMATTER = ISODateTimeFormat.date().withZoneUTC();

  private final int fieldCount;
  private final CassandraSession cassandraSession;
  private final String insertQuery;
  private final List<Object> values;
  private final String schemaName;
  private final List<Type> columnTypes;
  private int field = -1;

  @Inject
  public CassandraRecordSink(CassandraOutputTableHandle handle, CassandraSession cassandraSession) {
    this.fieldCount = requireNonNull(handle, "handle is null").getColumnNames().size();
    this.cassandraSession = requireNonNull(cassandraSession, "cassandraSession is null");

    schemaName = handle.getSchemaName();
    StringBuilder queryBuilder =
        new StringBuilder(
            String.format("INSERT INTO \"%s\".\"%s\"(", schemaName, handle.getTableName()));
    queryBuilder.append("id");

    for (String columnName : handle.getColumnNames()) {
      queryBuilder.append(",").append(columnName);
    }
    queryBuilder.append(") VALUES (?");

    for (int i = 0; i < handle.getColumnNames().size(); i++) {
      queryBuilder.append(",?");
    }
    queryBuilder.append(")");

    insertQuery = queryBuilder.toString();
    values = new ArrayList<>();

    columnTypes = handle.getColumnTypes();
  }

  @Override
  public void beginRecord() {
    checkState(field == -1, "already in record");

    field = 0;
    values.clear();
    values.add(UUID.randomUUID());
  }

  @Override
  public void finishRecord() {
    checkState(field != -1, "not in record");
    checkState(field == fieldCount, "not all fields set");
    field = -1;
    cassandraSession.execute(schemaName, insertQuery, values.toArray());
  }

  @Override
  public void appendNull() {
    append(null);
  }

  @Override
  public void appendBoolean(boolean value) {
    append(value);
  }

  @Override
  public void appendLong(long value) {
    if (DATE.equals(columnTypes.get(field))) {
      append(DATE_FORMATTER.print(TimeUnit.DAYS.toMillis(value)));
    } else if (INTEGER.equals(columnTypes.get(field))) {
      append(((Number) value).intValue());
    } else if (REAL.equals(columnTypes.get(field))) {
      append(intBitsToFloat((int) value));
    } else {
      append(value);
    }
  }

  @Override
  public void appendDouble(double value) {
    append(value);
  }

  @Override
  public void appendString(byte[] value) {
    append(new String(value, UTF_8));
  }

  @Override
  public void appendObject(Object value) {
    throw new UnsupportedOperationException();
  }

  @Override
  public Collection<Slice> commit() {
    checkState(field == -1, "record not finished");
    // the committer does not need any additional info
    return ImmutableList.of();
  }

  @Override
  public void rollback() {}

  @Override
  public List<Type> getColumnTypes() {
    return columnTypes;
  }

  private void append(Object value) {
    checkState(field != -1, "not in record");
    checkState(field < fieldCount, "all fields already set");
    values.add(value);
    field++;
  }
}
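
In appendLong, DATE columns arrive as days since the epoch and are rendered through the UTC formatter; a standalone sketch of just that conversion:

import java.util.concurrent.TimeUnit;
import org.joda.time.format.DateTimeFormatter;
import org.joda.time.format.ISODateTimeFormat;

public class DaysToDateDemo {
  public static void main(String[] args) {
    DateTimeFormatter fmt = ISODateTimeFormat.date().withZoneUTC();
    long days = 18262; // a DATE value stored as days since 1970-01-01
    System.out.println(fmt.print(TimeUnit.DAYS.toMillis(days))); // 2020-01-01
  }
}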
Example #12
public final class HiveUtil {
  public static final String PRESTO_VIEW_FLAG = "presto_view";

  private static final String VIEW_PREFIX = "/* Presto View: ";
  private static final String VIEW_SUFFIX = " */";

  private static final DateTimeFormatter HIVE_DATE_PARSER = ISODateTimeFormat.date().withZoneUTC();
  private static final DateTimeFormatter HIVE_TIMESTAMP_PARSER;

  private static final Pattern SUPPORTED_DECIMAL_TYPE =
      Pattern.compile(DECIMAL_TYPE_NAME + "\\((\\d+),(\\d+)\\)");
  private static final int DECIMAL_PRECISION_GROUP = 1;
  private static final int DECIMAL_SCALE_GROUP = 2;

  private static final String BIG_DECIMAL_POSTFIX = "BD";

  static {
    DateTimeParser[] timestampWithoutTimeZoneParser = {
      DateTimeFormat.forPattern("yyyy-M-d").getParser(),
      DateTimeFormat.forPattern("yyyy-M-d H:m").getParser(),
      DateTimeFormat.forPattern("yyyy-M-d H:m:s").getParser(),
      DateTimeFormat.forPattern("yyyy-M-d H:m:s.SSS").getParser(),
      DateTimeFormat.forPattern("yyyy-M-d H:m:s.SSSSSSS").getParser(),
      DateTimeFormat.forPattern("yyyy-M-d H:m:s.SSSSSSSSS").getParser(),
    };
    DateTimePrinter timestampWithoutTimeZonePrinter =
        DateTimeFormat.forPattern("yyyy-MM-dd HH:mm:ss.SSSSSSSSS").getPrinter();
    HIVE_TIMESTAMP_PARSER =
        new DateTimeFormatterBuilder()
            .append(timestampWithoutTimeZonePrinter, timestampWithoutTimeZoneParser)
            .toFormatter()
            .withZoneUTC();
  }

  private HiveUtil() {}

  public static RecordReader<?, ?> createRecordReader(
      Configuration configuration,
      Path path,
      long start,
      long length,
      Properties schema,
      List<HiveColumnHandle> columns) {
    // determine which hive columns we will read
    List<HiveColumnHandle> readColumns =
        ImmutableList.copyOf(filter(columns, column -> column.getColumnType() == REGULAR));
    List<Integer> readHiveColumnIndexes =
        ImmutableList.copyOf(transform(readColumns, HiveColumnHandle::getHiveColumnIndex));

    // Tell hive the columns we would like to read, this lets hive optimize reading column oriented
    // files
    setReadColumns(configuration, readHiveColumnIndexes);

    InputFormat<?, ?> inputFormat = getInputFormat(configuration, schema, true);
    JobConf jobConf = new JobConf(configuration);
    FileSplit fileSplit = new FileSplit(path, start, length, (String[]) null);

    // propagate serialization configuration to getRecordReader
    schema
        .stringPropertyNames()
        .stream()
        .filter(name -> name.startsWith("serialization."))
        .forEach(name -> jobConf.set(name, schema.getProperty(name)));

    try {
      return retry()
          .stopOnIllegalExceptions()
          .run(
              "createRecordReader",
              () -> inputFormat.getRecordReader(fileSplit, jobConf, Reporter.NULL));
    } catch (Exception e) {
      throw new PrestoException(
          HIVE_CANNOT_OPEN_SPLIT,
          format(
              "Error opening Hive split %s (offset=%s, length=%s) using %s: %s",
              path, start, length, getInputFormatName(schema), e.getMessage()),
          e);
    }
  }

  public static void setReadColumns(
      Configuration configuration, List<Integer> readHiveColumnIndexes) {
    configuration.set(READ_COLUMN_IDS_CONF_STR, Joiner.on(',').join(readHiveColumnIndexes));
    configuration.setBoolean(READ_ALL_COLUMNS, false);
  }

  static InputFormat<?, ?> getInputFormat(
      Configuration configuration, Properties schema, boolean symlinkTarget) {
    String inputFormatName = getInputFormatName(schema);
    try {
      JobConf jobConf = new JobConf(configuration);

      Class<? extends InputFormat<?, ?>> inputFormatClass =
          getInputFormatClass(jobConf, inputFormatName);
      if (symlinkTarget && (inputFormatClass == SymlinkTextInputFormat.class)) {
        // symlink targets are always TextInputFormat
        inputFormatClass = TextInputFormat.class;
      }

      return ReflectionUtils.newInstance(inputFormatClass, jobConf);
    } catch (ClassNotFoundException | RuntimeException e) {
      throw new RuntimeException("Unable to create input format " + inputFormatName, e);
    }
  }

  @SuppressWarnings({"unchecked", "RedundantCast"})
  private static Class<? extends InputFormat<?, ?>> getInputFormatClass(
      JobConf conf, String inputFormatName) throws ClassNotFoundException {
    // CDH uses different names for Parquet
    if ("parquet.hive.DeprecatedParquetInputFormat".equals(inputFormatName)
        || "parquet.hive.MapredParquetInputFormat".equals(inputFormatName)) {
      return MapredParquetInputFormat.class;
    }

    Class<?> clazz = conf.getClassByName(inputFormatName);
    // TODO: remove redundant cast to Object after IDEA-118533 is fixed
    return (Class<? extends InputFormat<?, ?>>) (Object) clazz.asSubclass(InputFormat.class);
  }

  static String getInputFormatName(Properties schema) {
    String name = schema.getProperty(FILE_INPUT_FORMAT);
    checkCondition(
        name != null,
        HIVE_INVALID_METADATA,
        "Table or partition is missing Hive input format property: %s",
        FILE_INPUT_FORMAT);
    return name;
  }

  public static long parseHiveDate(String value) {
    long millis = HIVE_DATE_PARSER.parseMillis(value);
    return TimeUnit.MILLISECONDS.toDays(millis);
  }

  public static long parseHiveTimestamp(String value, DateTimeZone timeZone) {
    return HIVE_TIMESTAMP_PARSER.withZone(timeZone).parseMillis(value);
  }

  static boolean isSplittable(InputFormat<?, ?> inputFormat, FileSystem fileSystem, Path path) {
    // ORC uses a custom InputFormat but is always splittable
    if (inputFormat.getClass().getSimpleName().equals("OrcInputFormat")) {
      return true;
    }

    // use reflection to get isSplittable method on FileInputFormat
    Method method = null;
    for (Class<?> clazz = inputFormat.getClass(); clazz != null; clazz = clazz.getSuperclass()) {
      try {
        method = clazz.getDeclaredMethod("isSplitable", FileSystem.class, Path.class);
        break;
      } catch (NoSuchMethodException ignored) {
      }
    }

    if (method == null) {
      return false;
    }
    try {
      method.setAccessible(true);
      return (boolean) method.invoke(inputFormat, fileSystem, path);
    } catch (InvocationTargetException | IllegalAccessException e) {
      throw Throwables.propagate(e);
    }
  }

  public static StructObjectInspector getTableObjectInspector(Properties schema) {
    return getTableObjectInspector(getDeserializer(schema));
  }

  public static StructObjectInspector getTableObjectInspector(
      @SuppressWarnings("deprecation") Deserializer deserializer) {
    try {
      ObjectInspector inspector = deserializer.getObjectInspector();
      checkArgument(
          inspector.getCategory() == Category.STRUCT,
          "expected STRUCT: %s",
          inspector.getCategory());
      return (StructObjectInspector) inspector;
    } catch (SerDeException e) {
      throw Throwables.propagate(e);
    }
  }

  public static List<? extends StructField> getTableStructFields(Table table) {
    return getTableObjectInspector(getHiveSchema(table)).getAllStructFieldRefs();
  }

  public static boolean isDeserializerClass(Properties schema, Class<?> deserializerClass) {
    return getDeserializerClassName(schema).equals(deserializerClass.getName());
  }

  public static String getDeserializerClassName(Properties schema) {
    String name = schema.getProperty(SERIALIZATION_LIB);
    checkCondition(
        name != null,
        HIVE_INVALID_METADATA,
        "Table or partition is missing Hive deserializer property: %s",
        SERIALIZATION_LIB);
    return name;
  }

  @SuppressWarnings("deprecation")
  public static Deserializer getDeserializer(Properties schema) {
    String name = getDeserializerClassName(schema);

    Deserializer deserializer = createDeserializer(getDeserializerClass(name));
    initializeDeserializer(deserializer, schema);
    return deserializer;
  }

  @SuppressWarnings("deprecation")
  private static Class<? extends Deserializer> getDeserializerClass(String name) {
    // CDH uses different names for Parquet
    if ("parquet.hive.serde.ParquetHiveSerDe".equals(name)) {
      return ParquetHiveSerDe.class;
    }

    try {
      return Class.forName(name, true, JavaUtils.getClassLoader()).asSubclass(Deserializer.class);
    } catch (ClassNotFoundException e) {
      throw new PrestoException(HIVE_SERDE_NOT_FOUND, "deserializer does not exist: " + name);
    } catch (ClassCastException e) {
      throw new RuntimeException("invalid deserializer class: " + name);
    }
  }

  @SuppressWarnings("deprecation")
  private static Deserializer createDeserializer(Class<? extends Deserializer> clazz) {
    try {
      return clazz.getConstructor().newInstance();
    } catch (ReflectiveOperationException e) {
      throw new RuntimeException("error creating deserializer: " + clazz.getName(), e);
    }
  }

  @SuppressWarnings("deprecation")
  private static void initializeDeserializer(Deserializer deserializer, Properties schema) {
    try {
      deserializer.initialize(new Configuration(false), schema);
    } catch (SerDeException e) {
      throw new RuntimeException(
          "error initializing deserializer: " + deserializer.getClass().getName());
    }
  }

  public static boolean isHiveNull(byte[] bytes) {
    return bytes.length == 2 && bytes[0] == '\\' && bytes[1] == 'N';
  }

  public static NullableValue parsePartitionValue(
      String partitionName, String value, Type type, DateTimeZone timeZone) {
    boolean isNull = HIVE_DEFAULT_DYNAMIC_PARTITION.equals(value);

    if (type instanceof DecimalType) {
      DecimalType decimalType = (DecimalType) type;
      if (isNull) {
        return NullableValue.asNull(decimalType);
      }
      if (decimalType.isShort()) {
        if (value.isEmpty()) {
          return NullableValue.of(decimalType, 0L);
        }
        return NullableValue.of(
            decimalType, shortDecimalPartitionKey(value, decimalType, partitionName));
      } else {
        if (value.isEmpty()) {
          return NullableValue.of(decimalType, Decimals.encodeUnscaledValue(BigInteger.ZERO));
        }
        return NullableValue.of(
            decimalType, longDecimalPartitionKey(value, decimalType, partitionName));
      }
    }

    if (BOOLEAN.equals(type)) {
      if (isNull) {
        return NullableValue.asNull(BOOLEAN);
      }
      if (value.isEmpty()) {
        return NullableValue.of(BOOLEAN, false);
      }
      return NullableValue.of(BOOLEAN, booleanPartitionKey(value, partitionName));
    }

    if (TINYINT.equals(type)) {
      if (isNull) {
        return NullableValue.asNull(TINYINT);
      }
      if (value.isEmpty()) {
        return NullableValue.of(TINYINT, 0L);
      }
      return NullableValue.of(TINYINT, tinyintPartitionKey(value, partitionName));
    }

    if (SMALLINT.equals(type)) {
      if (isNull) {
        return NullableValue.asNull(SMALLINT);
      }
      if (value.isEmpty()) {
        return NullableValue.of(SMALLINT, 0L);
      }
      return NullableValue.of(SMALLINT, smallintPartitionKey(value, partitionName));
    }

    if (INTEGER.equals(type)) {
      if (isNull) {
        return NullableValue.asNull(INTEGER);
      }
      if (value.isEmpty()) {
        return NullableValue.of(INTEGER, 0L);
      }
      return NullableValue.of(INTEGER, integerPartitionKey(value, partitionName));
    }

    if (BIGINT.equals(type)) {
      if (isNull) {
        return NullableValue.asNull(BIGINT);
      }
      if (value.isEmpty()) {
        return NullableValue.of(BIGINT, 0L);
      }
      return NullableValue.of(BIGINT, bigintPartitionKey(value, partitionName));
    }

    if (DATE.equals(type)) {
      if (isNull) {
        return NullableValue.asNull(DATE);
      }
      return NullableValue.of(DATE, datePartitionKey(value, partitionName));
    }

    if (TIMESTAMP.equals(type)) {
      if (isNull) {
        return NullableValue.asNull(TIMESTAMP);
      }
      return NullableValue.of(TIMESTAMP, timestampPartitionKey(value, timeZone, partitionName));
    }

    if (REAL.equals(type)) {
      if (isNull) {
        return NullableValue.asNull(REAL);
      }
      if (value.isEmpty()) {
        return NullableValue.of(REAL, (long) floatToRawIntBits(0.0f));
      }
      return NullableValue.of(REAL, floatPartitionKey(value, partitionName));
    }

    if (DOUBLE.equals(type)) {
      if (isNull) {
        return NullableValue.asNull(DOUBLE);
      }
      if (value.isEmpty()) {
        return NullableValue.of(DOUBLE, 0.0);
      }
      return NullableValue.of(DOUBLE, doublePartitionKey(value, partitionName));
    }

    if (type instanceof VarcharType) {
      if (isNull) {
        return NullableValue.asNull(type);
      }
      return NullableValue.of(type, varcharPartitionKey(value, partitionName, type));
    }

    if (isCharType(type)) {
      if (isNull) {
        return NullableValue.asNull(type);
      }
      return NullableValue.of(type, charPartitionKey(value, partitionName, type));
    }

    throw new PrestoException(
        NOT_SUPPORTED, format("Unsupported Type [%s] for partition: %s", type, partitionName));
  }

  public static boolean isPrestoView(Table table) {
    return "true".equals(table.getParameters().get(PRESTO_VIEW_FLAG));
  }

  public static String encodeViewData(String data) {
    return VIEW_PREFIX + Base64.getEncoder().encodeToString(data.getBytes(UTF_8)) + VIEW_SUFFIX;
  }

  public static String decodeViewData(String data) {
    checkCondition(
        data.startsWith(VIEW_PREFIX), HIVE_INVALID_VIEW_DATA, "View data missing prefix: %s", data);
    checkCondition(
        data.endsWith(VIEW_SUFFIX), HIVE_INVALID_VIEW_DATA, "View data missing suffix: %s", data);
    data = data.substring(VIEW_PREFIX.length());
    data = data.substring(0, data.length() - VIEW_SUFFIX.length());
    return new String(Base64.getDecoder().decode(data), UTF_8);
  }

  public static Optional<DecimalType> getDecimalType(HiveType hiveType) {
    return getDecimalType(hiveType.getHiveTypeName());
  }

  public static Optional<DecimalType> getDecimalType(String hiveTypeName) {
    Matcher matcher = SUPPORTED_DECIMAL_TYPE.matcher(hiveTypeName);
    if (matcher.matches()) {
      int precision = parseInt(matcher.group(DECIMAL_PRECISION_GROUP));
      int scale = parseInt(matcher.group(DECIMAL_SCALE_GROUP));
      return Optional.of(createDecimalType(precision, scale));
    } else {
      return Optional.empty();
    }
  }

  public static boolean isArrayType(Type type) {
    return type.getTypeSignature().getBase().equals(StandardTypes.ARRAY);
  }

  public static boolean isMapType(Type type) {
    return type.getTypeSignature().getBase().equals(StandardTypes.MAP);
  }

  public static boolean isRowType(Type type) {
    return type.getTypeSignature().getBase().equals(StandardTypes.ROW);
  }

  public static boolean isStructuralType(Type type) {
    String baseName = type.getTypeSignature().getBase();
    return baseName.equals(StandardTypes.MAP)
        || baseName.equals(StandardTypes.ARRAY)
        || baseName.equals(StandardTypes.ROW);
  }

  public static boolean isStructuralType(HiveType hiveType) {
    return hiveType.getCategory() == Category.LIST
        || hiveType.getCategory() == Category.MAP
        || hiveType.getCategory() == Category.STRUCT;
  }

  public static boolean booleanPartitionKey(String value, String name) {
    if (value.equalsIgnoreCase("true")) {
      return true;
    }
    if (value.equalsIgnoreCase("false")) {
      return false;
    }
    throw new PrestoException(
        HIVE_INVALID_PARTITION_VALUE,
        format("Invalid partition value '%s' for BOOLEAN partition key: %s", value, name));
  }

  public static long bigintPartitionKey(String value, String name) {
    try {
      return parseLong(value);
    } catch (NumberFormatException e) {
      throw new PrestoException(
          HIVE_INVALID_PARTITION_VALUE,
          format("Invalid partition value '%s' for BIGINT partition key: %s", value, name));
    }
  }

  public static long integerPartitionKey(String value, String name) {
    try {
      return parseInt(value);
    } catch (NumberFormatException e) {
      throw new PrestoException(
          HIVE_INVALID_PARTITION_VALUE,
          format("Invalid partition value '%s' for INTEGER partition key: %s", value, name));
    }
  }

  public static long smallintPartitionKey(String value, String name) {
    try {
      return parseShort(value);
    } catch (NumberFormatException e) {
      throw new PrestoException(
          HIVE_INVALID_PARTITION_VALUE,
          format("Invalid partition value '%s' for SMALLINT partition key: %s", value, name));
    }
  }

  public static long tinyintPartitionKey(String value, String name) {
    try {
      return parseByte(value);
    } catch (NumberFormatException e) {
      throw new PrestoException(
          HIVE_INVALID_PARTITION_VALUE,
          format("Invalid partition value '%s' for TINYINT partition key: %s", value, name));
    }
  }

  public static long floatPartitionKey(String value, String name) {
    try {
      return floatToRawIntBits(parseFloat(value));
    } catch (NumberFormatException e) {
      throw new PrestoException(
          HIVE_INVALID_PARTITION_VALUE,
          format("Invalid partition value '%s' for FLOAT partition key: %s", value, name));
    }
  }

  public static double doublePartitionKey(String value, String name) {
    try {
      return parseDouble(value);
    } catch (NumberFormatException e) {
      throw new PrestoException(
          HIVE_INVALID_PARTITION_VALUE,
          format("Invalid partition value '%s' for DOUBLE partition key: %s", value, name));
    }
  }

  public static long datePartitionKey(String value, String name) {
    try {
      return parseHiveDate(value);
    } catch (IllegalArgumentException e) {
      throw new PrestoException(
          HIVE_INVALID_PARTITION_VALUE,
          format("Invalid partition value '%s' for DATE partition key: %s", value, name));
    }
  }

  public static long timestampPartitionKey(String value, DateTimeZone zone, String name) {
    try {
      return parseHiveTimestamp(value, zone);
    } catch (IllegalArgumentException e) {
      throw new PrestoException(
          HIVE_INVALID_PARTITION_VALUE,
          format("Invalid partition value '%s' for TIMESTAMP partition key: %s", value, name));
    }
  }

  public static long shortDecimalPartitionKey(String value, DecimalType type, String name) {
    return decimalPartitionKey(value, type, name).unscaledValue().longValue();
  }

  public static Slice longDecimalPartitionKey(String value, DecimalType type, String name) {
    return Decimals.encodeUnscaledValue(decimalPartitionKey(value, type, name).unscaledValue());
  }

  private static BigDecimal decimalPartitionKey(String value, DecimalType type, String name) {
    try {
      if (value.endsWith(BIG_DECIMAL_POSTFIX)) {
        value = value.substring(0, value.length() - BIG_DECIMAL_POSTFIX.length());
      }

      BigDecimal decimal = new BigDecimal(value);
      decimal = decimal.setScale(type.getScale(), ROUND_UNNECESSARY);
      if (decimal.precision() > type.getPrecision()) {
        throw new PrestoException(
            HIVE_INVALID_PARTITION_VALUE,
            format(
                "Invalid partition value '%s' for %s partition key: %s",
                value, type.toString(), name));
      }
      return decimal;
    } catch (NumberFormatException e) {
      throw new PrestoException(
          HIVE_INVALID_PARTITION_VALUE,
          format(
              "Invalid partition value '%s' for %s partition key: %s",
              value, type.toString(), name));
    }
  }

  public static Slice varcharPartitionKey(String value, String name, Type columnType) {
    Slice partitionKey = Slices.utf8Slice(value);
    VarcharType varcharType = checkType(columnType, VarcharType.class, "columnType");
    if (SliceUtf8.countCodePoints(partitionKey) > varcharType.getLength()) {
      throw new PrestoException(
          HIVE_INVALID_PARTITION_VALUE,
          format(
              "Invalid partition value '%s' for %s partition key: %s",
              value, columnType.toString(), name));
    }
    return partitionKey;
  }

  public static Slice charPartitionKey(String value, String name, Type columnType) {
    Slice partitionKey = trimSpaces(Slices.utf8Slice(value));
    CharType charType = checkType(columnType, CharType.class, "columnType");
    if (SliceUtf8.countCodePoints(partitionKey) > charType.getLength()) {
      throw new PrestoException(
          HIVE_INVALID_PARTITION_VALUE,
          format(
              "Invalid partition value '%s' for %s partition key: %s",
              value, columnType.toString(), name));
    }
    return partitionKey;
  }

  public static SchemaTableName schemaTableName(ConnectorTableHandle tableHandle) {
    return checkType(tableHandle, HiveTableHandle.class, "tableHandle").getSchemaTableName();
  }

  public static List<HiveColumnHandle> hiveColumnHandles(
      String connectorId, Table table, boolean forceIntegralToBigint) {
    ImmutableList.Builder<HiveColumnHandle> columns = ImmutableList.builder();

    // add the data fields first
    columns.addAll(getRegularColumnHandles(connectorId, table, forceIntegralToBigint));

    // add the partition keys last (like Hive does)
    columns.addAll(getPartitionKeyColumnHandles(connectorId, table, forceIntegralToBigint));

    // add hidden column
    columns.add(pathColumnHandle(connectorId));

    return columns.build();
  }

  public static List<HiveColumnHandle> getRegularColumnHandles(
      String connectorId, Table table, boolean forceIntegralToBigint) {
    ImmutableList.Builder<HiveColumnHandle> columns = ImmutableList.builder();

    int hiveColumnIndex = 0;
    for (Column field : table.getDataColumns()) {
      // ignore unsupported types rather than failing
      HiveType hiveType = field.getType();
      if (hiveType.isSupportedType()) {
        columns.add(
            new HiveColumnHandle(
                connectorId,
                field.getName(),
                hiveType,
                hiveType.getTypeSignature(forceIntegralToBigint),
                hiveColumnIndex,
                REGULAR));
      }
      hiveColumnIndex++;
    }

    return columns.build();
  }

  public static List<HiveColumnHandle> getPartitionKeyColumnHandles(
      String connectorId, Table table, boolean forceIntegralToBigint) {
    ImmutableList.Builder<HiveColumnHandle> columns = ImmutableList.builder();

    List<Column> partitionKeys = table.getPartitionColumns();
    for (Column field : partitionKeys) {
      HiveType hiveType = field.getType();
      if (!hiveType.isSupportedType()) {
        throw new PrestoException(
            NOT_SUPPORTED,
            format(
                "Unsupported Hive type %s found in partition keys of table %s.%s",
                hiveType, table.getDatabaseName(), table.getTableName()));
      }
      columns.add(
          new HiveColumnHandle(
              connectorId,
              field.getName(),
              hiveType,
              hiveType.getTypeSignature(forceIntegralToBigint),
              -1,
              PARTITION_KEY));
    }

    return columns.build();
  }

  public static Slice base64Decode(byte[] bytes) {
    return Slices.wrappedBuffer(Base64.getDecoder().decode(bytes));
  }

  public static void checkCondition(
      boolean condition, ErrorCodeSupplier errorCode, String formatString, Object... args) {
    if (!condition) {
      throw new PrestoException(errorCode, format(formatString, args));
    }
  }

  @Nullable
  public static String annotateColumnComment(Optional<String> comment, boolean partitionKey) {
    String normalizedComment = comment.orElse("").trim();
    if (partitionKey) {
      if (normalizedComment.isEmpty()) {
        normalizedComment = "Partition Key";
      } else {
        normalizedComment = "Partition Key: " + normalizedComment;
      }
    }
    return normalizedComment.isEmpty() ? null : normalizedComment;
  }

  public static List<String> toPartitionValues(String partitionName) {
    // mimics Warehouse.makeValsFromName
    ImmutableList.Builder<String> resultBuilder = ImmutableList.builder();
    int start = 0;
    while (true) {
      while (start < partitionName.length() && partitionName.charAt(start) != '=') {
        start++;
      }
      start++;
      int end = start;
      while (end < partitionName.length() && partitionName.charAt(end) != '/') {
        end++;
      }
      if (start > partitionName.length()) {
        break;
      }
      resultBuilder.add(unescapePathName(partitionName.substring(start, end)));
      start = end + 1;
    }
    return resultBuilder.build();
  }

  public static String getPrefilledColumnValue(
      HiveColumnHandle columnHandle, HivePartitionKey partitionKey, Path path) {
    if (partitionKey != null) {
      return partitionKey.getValue();
    }
    if (isPathColumnHandle(columnHandle)) {
      return path.toString();
    }
    throw new PrestoException(NOT_SUPPORTED, "unsupported hidden column: " + columnHandle);
  }

  public static void closeWithSuppression(RecordCursor recordCursor, Throwable throwable) {
    requireNonNull(recordCursor, "recordCursor is null");
    requireNonNull(throwable, "throwable is null");
    try {
      recordCursor.close();
    } catch (RuntimeException e) {
      // Self-suppression not permitted
      if (throwable != e) {
        throwable.addSuppressed(e);
      }
    }
  }
}
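
parseHiveDate above goes the other way, from an ISO string to days since the epoch; a standalone sketch of the same arithmetic using only Joda-Time:

import java.util.concurrent.TimeUnit;
import org.joda.time.format.DateTimeFormatter;
import org.joda.time.format.ISODateTimeFormat;

public class HiveDateDemo {
  public static void main(String[] args) {
    DateTimeFormatter parser = ISODateTimeFormat.date().withZoneUTC();
    long millis = parser.parseMillis("1970-01-02");
    System.out.println(TimeUnit.MILLISECONDS.toDays(millis)); // 1
  }
}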
Example #13
 public JsonElement serialize(LocalDate src, Type typeOfSrc, JsonSerializationContext context) {
   DateTimeFormatter fmt = ISODateTimeFormat.date();
   return new JsonPrimitive(fmt.print(src));
 }
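
This is the serializer half of a Gson type-adapter pair; a matching deserializer might look like the following sketch (illustrative, not from the original source):

import java.lang.reflect.Type;
import com.google.gson.JsonDeserializationContext;
import com.google.gson.JsonDeserializer;
import com.google.gson.JsonElement;
import com.google.gson.JsonParseException;
import org.joda.time.LocalDate;
import org.joda.time.format.ISODateTimeFormat;

class LocalDateJsonDeserializer implements JsonDeserializer<LocalDate> {
  @Override
  public LocalDate deserialize(JsonElement json, Type typeOfT, JsonDeserializationContext context)
      throws JsonParseException {
    // parse the yyyy-MM-dd string produced by the serializer above
    return ISODateTimeFormat.date().parseLocalDate(json.getAsString());
  }
}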
Example #14
 public static String getDate() {
   DateTime dt = new DateTime();
   DateTimeFormatter fmt = ISODateTimeFormat.date();
   return (fmt.print(dt));
 }
Example #15
 public void setCountDate(String countDate) throws ParseException {
   this.countDate = new Date(new DateTime(countDate).getMillis());
   this.countDateString = new DateTime(countDate).toString(ISODateTimeFormat.date());
 }
Example #16
 public String getDate() {
   return ISODateTimeFormat.date().print(getLastUpdated().getTime())
       + " "
       + ISODateTimeFormat.timeNoMillis().print(getLastUpdated().getTime());
 }
Example #17
public class PaPeopleProcessor {

  private static final String PERSON_URI_PREFIX =
      "http://people.atlasapi.org/people.pressassociation.com/";
  private static final String PA_PERSON_URI_PREFIX =
      "http://people.atlasapi.org/pressassociation.com/";

  private static final String BASE_IMAGE_URL =
      "http://images.atlas.metabroadcast.com/people.pressassociation.com/";

  private final PeopleResolver personResolver;
  private final PersonWriter personWriter;
  private final DateTimeFormatter dateTimeFormatter = ISODateTimeFormat.date().withZoneUTC();

  public PaPeopleProcessor(PeopleResolver personResolver, PersonWriter personWriter) {
    this.personResolver = personResolver;
    this.personWriter = personWriter;
  }

  public void process(org.atlasapi.remotesite.pa.profiles.bindings.Person paPerson) {
    Person person = ingestPerson(paPerson);
    Optional<Person> existing = personResolver.person(person.getCanonicalUri());
    if (!existing.isPresent()) {
      personWriter.createOrUpdatePerson(person);
    } else {
      merge(existing.get(), person);
      personWriter.createOrUpdatePerson(existing.get());
    }
  }

  private void merge(Person existing, Person newPerson) {
    existing.withName(newPerson.name());
    existing.setGivenName(newPerson.getGivenName());
    existing.setFamilyName(newPerson.getFamilyName());
    existing.setGender(newPerson.getGender());
    existing.setBirthDate(newPerson.getBirthDate());
    existing.setBirthPlace(newPerson.getBirthPlace());
    existing.setDescription(newPerson.getDescription());
    existing.setQuotes(newPerson.getQuotes());
    existing.setPublisher(Publisher.PA_PEOPLE);
    existing.setImages(newPerson.getImages());
    existing.setImage(newPerson.getImage());
  }

  private Person ingestPerson(org.atlasapi.remotesite.pa.profiles.bindings.Person paPerson) {
    Person person = new Person();
    person.setCanonicalUri(PERSON_URI_PREFIX + paPerson.getId());

    Name name = paPerson.getName();
    // First and Last name are optional in the dtd so check both are
    // non-null to avoid strange names.
    if (!Strings.isNullOrEmpty(name.getFirstname()) && !Strings.isNullOrEmpty(name.getLastname())) {
      person.withName(name.getFirstname() + " " + name.getLastname());
    }
    person.setGivenName(name.getFirstname());
    person.setFamilyName(name.getLastname());

    person.setGender(paPerson.getGender());
    if (paPerson.getBorn() != null) {
      person.setBirthDate(dateTimeFormatter.parseDateTime(paPerson.getBorn()));
    }
    person.setBirthPlace(paPerson.getBornIn());
    person.setDescription(paPerson.getEarlyLife() + "\n\n" + paPerson.getCareer());
    person.addQuote(paPerson.getQuote());
    person.setPublisher(Publisher.PA_PEOPLE);
    person.setImages(extractImages(paPerson.getPictures()));
    person.setImage(getPrimary(person.getImages()));
    setDirectEquivalentToPAPerson(person, paPerson.getId());
    return person;
  }

  private String getPrimary(Set<Image> images) {
    for (Image image : images) {
      if (ImageType.PRIMARY.equals(image.getType())) {
        return image.getCanonicalUri();
      }
    }
    return null;
  }

  private ImmutableSet<Image> extractImages(Pictures pictures) {
    return pictures != null ? extractImages(pictures.getPicture()) : ImmutableSet.<Image>of();
  }

  private ImmutableSet<Image> extractImages(List<Picture> pictures) {
    ImmutableSet.Builder<Image> images = ImmutableSet.builder();
    for (Picture picture : pictures) {
      if (!Strings.isNullOrEmpty(picture.getvalue())) {
        images.add(extractImage(picture));
      }
    }
    return images.build();
  }

  private Image extractImage(Picture picture) {
    Image image = new Image(BASE_IMAGE_URL + picture.getvalue());
    image.setWidth(Ints.tryParse(picture.getWidth()));
    image.setHeight(Ints.tryParse(picture.getHeight()));
    image.setType(ImageType.PRIMARY);
    return image;
  }

  /**
   * PA People are ingested separately from PA biogs people. Therefore we set a direct equivalence
   * on the PA person if they exist. In the future this will change to an equivalence job so the
   * equivalence will be asserted at a later stage even if the PA person doesn't exist at the time
   * when the PA biog person is ingested.
   *
   * @param person
   */
  private void setDirectEquivalentToPAPerson(Person person, String id) {
    Optional<Person> paPerson = personResolver.person(PA_PERSON_URI_PREFIX + id);
    if (paPerson.isPresent()) {
      person.setEquivalentTo(ImmutableSet.of(LookupRef.from(paPerson.get())));
    }
  }
}
Example #18
 public String getDateStrISO8601NoTime() {
   DateTimeFormatter fmt = ISODateTimeFormat.date();
   return fmt.print(date);
 }
Example #19
  @Test
  public void shouldFilterLogEntriesOnMultipleCriteria() throws Exception {
    DocumentModel doc = RestServerInit.getFile(1, session);

    DateTime firstDate = new DateTime();
    DateTime secondDate = firstDate.plusDays(10);

    List<LogEntry> logEntries = new ArrayList<>();
    LogEntry logEntry = auditLogger.newLogEntry();
    logEntry.setDocUUID(doc.getRef());
    logEntry.setCategory("One");
    logEntry.setEventId("firstEvent");
    logEntry.setPrincipalName("bender");
    logEntry.setEventDate(firstDate.toDate());
    logEntries.add(logEntry);
    logEntry = auditLogger.newLogEntry();
    logEntry.setDocUUID(doc.getRef());
    logEntry.setCategory("One");
    logEntry.setEventId("secondEvent");
    logEntry.setPrincipalName("leela");
    logEntry.setEventDate(firstDate.toDate());
    logEntries.add(logEntry);
    logEntry = auditLogger.newLogEntry();
    logEntry.setDocUUID(doc.getRef());
    logEntry.setCategory("One");
    logEntry.setEventId("firstEvent");
    logEntry.setPrincipalName("leela");
    logEntry.setEventDate(secondDate.toDate());
    logEntries.add(logEntry);
    logEntry = auditLogger.newLogEntry();
    logEntry.setDocUUID(doc.getRef());
    logEntry.setCategory("One");
    logEntry.setEventId("thirdEvent");
    logEntry.setPrincipalName("leela");
    logEntry.setEventDate(secondDate.toDate());
    logEntries.add(logEntry);
    auditLogger.addLogEntries(logEntries);

    TransactionHelper.commitOrRollbackTransaction();
    TransactionHelper.startTransaction();

    MultivaluedMap<String, String> queryParams = new MultivaluedMapImpl();
    queryParams.putSingle("category", "One");
    queryParams.add("principalName", "leela");
    ClientResponse response =
        getResponse(
            BaseTest.RequestType.GET, "id/" + doc.getId() + "/@" + AuditAdapter.NAME, queryParams);
    assertEquals(Response.Status.OK.getStatusCode(), response.getStatus());
    JsonNode node = mapper.readTree(response.getEntityInputStream());
    List<JsonNode> nodes = getLogEntries(node);
    assertEquals(3, nodes.size());

    queryParams = new MultivaluedMapImpl();
    queryParams.putSingle("category", "One");
    queryParams.add("principalName", "leela");
    queryParams.add("eventId", "thirdEvent");
    response =
        getResponse(
            BaseTest.RequestType.GET, "id/" + doc.getId() + "/@" + AuditAdapter.NAME, queryParams);
    assertEquals(Response.Status.OK.getStatusCode(), response.getStatus());
    node = mapper.readTree(response.getEntityInputStream());
    nodes = getLogEntries(node);
    assertEquals(1, nodes.size());

    queryParams = new MultivaluedMapImpl();
    queryParams.putSingle("category", "One");
    queryParams.add("principalName", "leela");
    queryParams.add("eventId", "thirdEvent");
    queryParams.add("startEventDate", ISODateTimeFormat.date().print(firstDate.plusDays(1)));
    queryParams.add("endEventDate", ISODateTimeFormat.date().print(secondDate.minus(1)));
    response =
        getResponse(
            BaseTest.RequestType.GET, "id/" + doc.getId() + "/@" + AuditAdapter.NAME, queryParams);
    assertEquals(Response.Status.OK.getStatusCode(), response.getStatus());
    node = mapper.readTree(response.getEntityInputStream());
    nodes = getLogEntries(node);
    assertEquals(0, nodes.size());
  }
Example #20
 public long end() {
   return ISODateTimeFormat.date().parseDateTime(this.eventEnd).getMillis();
 }
Example #21
 private boolean isToday() {
   return new DateTime(countDate)
       .toString(ISODateTimeFormat.date())
       .equals(
           new DateTime(new Date(System.currentTimeMillis())).toString(ISODateTimeFormat.date()));
 }
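
Formatting both instants just to compare strings works, but the comparison can also be done directly on the date values; a minimal equivalent sketch, assuming countDate is a java.util.Date:

import java.util.Date;
import org.joda.time.LocalDate;

public final class DateChecks {
  /** Same check without string formatting: truncate both instants to a LocalDate and compare. */
  static boolean isToday(Date countDate) {
    return new LocalDate(countDate).equals(new LocalDate());
  }
}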