Пример #1
0
  /**
   * Creates a snapshot schema from the given JSON specification.
   *
   * @param schemaJSON The JSON specifying the snapshot schema.
   */
  public SnapshotSchema(String schemaJSON) {
    setSchema(schemaJSON);

    try {
      initialize();
    } catch (Exception e) {
      // Propagate as unchecked via the project's standard rethrow helper.
      DTThrowable.rethrow(e);
    }
  }
 /**
  * Called by the engine when the operator is idle. While {@code execute} is set, parks the
  * thread for {@code spinningTime} ms to avoid busy-spinning; otherwise rethrows the failure
  * recorded in {@code cause}.
  */
 @Override
 public void handleIdleTime() {
   if (execute) {
     try {
       Thread.sleep(spinningTime);
     } catch (InterruptedException ie) {
       // Restore the interrupt status so callers up the stack can still observe the
       // interruption after we rethrow.
       Thread.currentThread().interrupt();
       throw new RuntimeException(ie);
     }
   } else {
     logger.error("Exception: ", cause);
     DTThrowable.rethrow(cause.get());
   }
 }
Пример #3
0
 /**
  * Registers all static {@code Attribute} fields declared on {@code clazz}. For each such field:
  * assigns a default name ({@code canonicalClassName.fieldName}) when the attribute has none, and
  * installs a default {@code StringCodec} derived from the default value's type when no codec is
  * set. The discovered attribute set is cached in {@code map} keyed by the class.
  *
  * @param clazz the class whose static Attribute fields should be registered
  * @return 0 if the class was already registered; otherwise a 64-bit signature with the class's
  *     modifiers in the upper 32 bits and its hash code in the lower 32 bits
  */
 /* both for Enum2String */
 @SuppressWarnings(value = {"unchecked", "rawtypes"})
 public static long initialize(final Class<?> clazz) {
   if (map.containsKey(clazz)) {
     return 0;
   }
   Set<Attribute<Object>> set = new HashSet<Attribute<Object>>();
   try {
     for (Field f : clazz.getDeclaredFields()) {
       if (Modifier.isStatic(f.getModifiers())
           && Attribute.class.isAssignableFrom(f.getType())) {
         // NOTE(review): assumes the static attribute fields are accessible (e.g. public);
         // f.get(null) would throw IllegalAccessException otherwise — confirm with declarers.
         Attribute<Object> attribute = (Attribute<Object>) f.get(null);
         if (attribute.name == null) {
           // Attribute.name is not publicly writable; set it reflectively to a default of
           // "<canonical class name>.<field name>".
           Field nameField = Attribute.class.getDeclaredField("name");
           nameField.setAccessible(true);
           nameField.set(attribute, clazz.getCanonicalName() + '.' + f.getName());
           nameField.setAccessible(false);
         }
         /* Handle trivial cases here even though this may spoil API users. */
         if (attribute.codec == null) {
           // Infer a default string codec from the runtime type of the default value.
           StringCodec<?> codec = null;
           if (attribute.defaultValue != null) {
             Class<?> klass = attribute.defaultValue.getClass();
             if (klass == String.class) {
               codec = new String2String();
             } else if (klass == Integer.class) {
               codec = new Integer2String();
             } else if (klass == Long.class) {
               codec = new Long2String();
             } else if (klass == Boolean.class) {
               codec = new Boolean2String();
             } else if (Enum.class.isAssignableFrom(klass)) {
               codec = new Enum2String(klass);
             }
           }
           if (codec != null) {
             // Attribute.codec is likewise not publicly writable; install reflectively.
             Field codecField = Attribute.class.getDeclaredField("codec");
             codecField.setAccessible(true);
             codecField.set(attribute, codec);
             codecField.setAccessible(false);
           }
         }
         set.add(attribute);
       }
     }
   } catch (Exception ex) {
     DTThrowable.rethrow(ex);
   }
   map.put(clazz, set);
   // Mask the hash code to 32 bits before OR-ing: a negative hashCode() would otherwise be
   // sign-extended to 64 bits and set every modifier bit in the upper half.
   return (long) clazz.getModifiers() << 32 | (clazz.hashCode() & 0xffffffffL);
 }
Пример #4
0
 /**
  * Collects every static {@code Attribute} field declared on {@code clazz}, mapping each
  * attribute to the value the supplied context resolves for it.
  *
  * @param context the context used to look up each attribute's value
  * @param clazz the class whose declared static Attribute fields are scanned
  * @return a map from attribute to its value in {@code context}
  */
 public static Map<Attribute<Object>, Object> getAllAttributes(
     Context context, Class<?> clazz) {
   Map<Attribute<Object>, Object> attributeValues =
       new HashMap<Attribute<Object>, Object>();
   try {
     for (Field field : clazz.getDeclaredFields()) {
       boolean isStaticAttribute =
           Modifier.isStatic(field.getModifiers())
               && Attribute.class.isAssignableFrom(field.getType());
       if (isStaticAttribute) {
         @SuppressWarnings(value = "unchecked")
         Attribute<Object> attribute = (Attribute<Object>) field.get(null);
         attributeValues.put(attribute, context.getValue(attribute));
       }
     }
   } catch (Exception ex) {
     // Propagate reflection failures as unchecked via the project's rethrow helper.
     DTThrowable.rethrow(ex);
   }
   return attributeValues;
 }
  /**
   * Assigns the partitions according to certain key values and keeps track of the keys that each
   * partition will be processing so that in the case of a rollback, each partition will only clear
   * the data that it is responsible for.
   *
   * @param partitions the current partitions; they are discarded and fully rebuilt
   * @param context partitioning context used to determine the required partition count
   * @return the newly created collection of partitions
   */
  @Override
  public Collection<com.datatorrent.api.Partitioner.Partition<UniqueValueCountAppender<V>>>
      definePartitions(
          Collection<com.datatorrent.api.Partitioner.Partition<UniqueValueCountAppender<V>>>
              partitions,
          PartitioningContext context) {
    final int finalCapacity =
        DefaultPartition.getRequiredPartitionCount(context, this.partitionCount);
    // Keep one existing operator instance so its shared state (store, tableName, cacheManager)
    // can be copied onto the freshly created instances below.
    UniqueValueCountAppender<V> anOldOperator =
        partitions.iterator().next().getPartitionedInstance();
    partitions.clear();

    Collection<Partition<UniqueValueCountAppender<V>>> newPartitions =
        Lists.newArrayListWithCapacity(finalCapacity);

    for (int i = 0; i < finalCapacity; i++) {
      try {
        // Instantiate via this.getClass() so subclasses are partitioned into their own type.
        @SuppressWarnings("unchecked")
        UniqueValueCountAppender<V> statefulUniqueCount = this.getClass().newInstance();
        DefaultPartition<UniqueValueCountAppender<V>> partition =
            new DefaultPartition<UniqueValueCountAppender<V>>(statefulUniqueCount);
        newPartitions.add(partition);
      } catch (Throwable cause) {
        DTThrowable.rethrow(cause);
      }
    }

    // Let the framework assign partition keys over the input port, then read back the mask that
    // is common to all new partitions.
    DefaultPartition.assignPartitionKeys(Collections.unmodifiableCollection(newPartitions), input);
    int lPartitionMask = newPartitions.iterator().next().getPartitionKeys().get(input).mask;

    // Propagate per-partition keys, the shared mask, and the old operator's shared state onto
    // each new operator instance.
    for (Partition<UniqueValueCountAppender<V>> statefulUniqueCountPartition : newPartitions) {
      UniqueValueCountAppender<V> statefulUniqueCountInstance =
          statefulUniqueCountPartition.getPartitionedInstance();

      statefulUniqueCountInstance.partitionKeys =
          statefulUniqueCountPartition.getPartitionKeys().get(input).partitions;
      statefulUniqueCountInstance.partitionMask = lPartitionMask;
      statefulUniqueCountInstance.store = anOldOperator.store;
      statefulUniqueCountInstance.tableName = anOldOperator.tableName;
      statefulUniqueCountInstance.cacheManager = anOldOperator.cacheManager;
    }
    return newPartitions;
  }