Example #1
  /**
   * This creates a snapshot schema from the specified json.
   *
   * @param schemaJSON The JSON specifying the snapshot schema.
   */
  public SnapshotSchema(String schemaJSON) {
    setSchema(schemaJSON);

    try {
      initialize();
    } catch (Exception ex) {
      DTThrowable.rethrow(ex);
    }
  }
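A quick way to exercise this constructor is to hand it a small schema document. The sketch below is illustrative only: the JSON keys ("values", "name", "type") and the getSchemaJSON() accessor are assumptions based on the Malhar snapshot schema format rather than anything shown in the snippet.

import com.datatorrent.lib.appdata.schemas.SnapshotSchema;

public class SnapshotSchemaUsage {
  public static void main(String[] args) {
    // Assumed snapshot schema layout: a list of value fields, each with a name and a type.
    String schemaJSON =
        "{\"values\":[{\"name\":\"url\",\"type\":\"string\"},"
            + "{\"name\":\"count\",\"type\":\"integer\"}]}";
    // Any problem inside initialize() surfaces as an unchecked exception,
    // because the constructor routes it through DTThrowable.rethrow.
    SnapshotSchema schema = new SnapshotSchema(schemaJSON);
    System.out.println(schema.getSchemaJSON()); // assumed accessor from the Schema interface
  }
}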
Example #2
 @Override
 public void handleIdleTime() {
   if (execute) {
     try {
       // Nothing to emit right now: back off briefly instead of busy-waiting.
       Thread.sleep(spinningTime);
     } catch (InterruptedException ie) {
       throw new RuntimeException(ie);
     }
   } else {
     // The consumer thread has stopped with an error; log and rethrow its
     // captured cause on the operator thread so the failure is not swallowed.
     logger.error("Exception: ", cause.get());
     DTThrowable.rethrow(cause.get());
   }
 }
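The method depends on fields of the enclosing operator that are not shown in the snippet. The skeleton below is a hypothetical reconstruction of how those fields are typically wired together (the AtomicReference type for cause and the field visibilities are assumptions): a background consumer thread records its failure and clears the execute flag, and handleIdleTime() then surfaces that failure on the operator thread. DTThrowable is replaced by a plain RuntimeException here to keep the sketch self-contained.

import java.util.concurrent.atomic.AtomicReference;

public class IdleAwareOperator {
  private transient volatile boolean execute = true;
  private final transient AtomicReference<Throwable> cause = new AtomicReference<Throwable>();
  private int spinningTime = 10; // back-off in milliseconds while there is no work

  // Called from the background consumer thread when it dies.
  void onConsumerFailure(Throwable t) {
    cause.set(t);
    execute = false;
  }

  public void handleIdleTime() {
    if (execute) {
      try {
        Thread.sleep(spinningTime); // avoid busy-waiting while idle
      } catch (InterruptedException ie) {
        throw new RuntimeException(ie);
      }
    } else {
      // Surface the asynchronous failure on the operator thread.
      throw new RuntimeException(cause.get());
    }
  }
}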
Example #3
 /**
  * Scans {@code clazz} for static {@link Attribute} fields, filling in missing
  * attribute names and, where possible, a default String codec derived from the
  * default value's type, then registers the attribute set in the global map.
  */
 @SuppressWarnings(value = {"unchecked", "rawtypes"}) /* both suppressions are for the raw Enum2String use below */
 public static long initialize(final Class<?> clazz) {
   if (map.containsKey(clazz)) {
     return 0;
   }
   Set<Attribute<Object>> set = new HashSet<Attribute<Object>>();
   try {
     for (Field f : clazz.getDeclaredFields()) {
       if (Modifier.isStatic(f.getModifiers())
           && Attribute.class.isAssignableFrom(f.getType())) {
         Attribute<Object> attribute = (Attribute<Object>) f.get(null);
         if (attribute.name == null) {
           Field nameField = Attribute.class.getDeclaredField("name");
           nameField.setAccessible(true);
           nameField.set(attribute, clazz.getCanonicalName() + '.' + f.getName());
           nameField.setAccessible(false);
         }
         /* Handle trivial cases here even though this may spoil API users. */
         if (attribute.codec == null) {
           StringCodec<?> codec = null;
           if (attribute.defaultValue != null) {
             Class<?> klass = attribute.defaultValue.getClass();
             if (klass == String.class) {
               codec = new String2String();
             } else if (klass == Integer.class) {
               codec = new Integer2String();
             } else if (klass == Long.class) {
               codec = new Long2String();
             } else if (klass == Boolean.class) {
               codec = new Boolean2String();
             } else if (Enum.class.isAssignableFrom(klass)) {
               codec = new Enum2String(klass);
             }
           }
           if (codec != null) {
             Field codecField = Attribute.class.getDeclaredField("codec");
             codecField.setAccessible(true);
             codecField.set(attribute, codec);
             codecField.setAccessible(false);
           }
         }
         set.add(attribute);
       }
     }
   } catch (Exception ex) {
     DTThrowable.rethrow(ex);
   }
   map.put(clazz, set);
   return (long) clazz.getModifiers() << 32 | clazz.hashCode();
 }
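The usual caller of this method is the type that declares the attributes. The interface below is hypothetical (MyContext is not from the source) and assumes the Attribute(defaultValue) constructor and the Attribute.AttributeMap.AttributeInitializer nesting used in the DataTorrent API; initialize() then fills in the attribute names and infers codecs from the default-value types exactly as shown above.

import com.datatorrent.api.Attribute;
import com.datatorrent.api.Attribute.AttributeMap.AttributeInitializer; // assumed location of initialize()

public interface MyContext {
  // Interface fields are implicitly static, so the Modifier.isStatic(...) check above matches them.
  Attribute<String> APPLICATION_NAME = new Attribute<String>("unnamed"); // -> String2String codec
  Attribute<Integer> SPIN_MILLIS = new Attribute<Integer>(10);           // -> Integer2String codec
  Attribute<Boolean> DEBUG = new Attribute<Boolean>(false);              // -> Boolean2String codec

  // One-time registration; the returned fingerprint doubles as a serialVersionUID.
  long serialVersionUID = AttributeInitializer.initialize(MyContext.class);
}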
Example #4
 /**
  * Returns every static {@link Attribute} declared on {@code clazz}, mapped to the
  * value that {@code context} resolves for it.
  */
 public static Map<Attribute<Object>, Object> getAllAttributes(
     Context context, Class<?> clazz) {
   Map<Attribute<Object>, Object> result = new HashMap<Attribute<Object>, Object>();
   try {
     for (Field f : clazz.getDeclaredFields()) {
       if (Modifier.isStatic(f.getModifiers())
           && Attribute.class.isAssignableFrom(f.getType())) {
         @SuppressWarnings(value = "unchecked")
         Attribute<Object> attribute = (Attribute<Object>) f.get(null);
         result.put(attribute, context.getValue(attribute));
       }
     }
   } catch (Exception ex) {
     DTThrowable.rethrow(ex);
   }
   return result;
 }
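As a hedged usage sketch, the helper below (illustrative class and method names, not from the source) prints every attribute declared on OperatorContext together with the value the given context resolves for it. It assumes getAllAttributes(...) is reachable through Attribute.AttributeMap.AttributeInitializer, as in the DataTorrent API.

import java.util.Map;

import com.datatorrent.api.Attribute;
import com.datatorrent.api.Attribute.AttributeMap.AttributeInitializer;
import com.datatorrent.api.Context;

public class AttributeDump {
  // 'context' would normally be the OperatorContext handed to an operator's setup() method.
  public static void logOperatorAttributes(Context.OperatorContext context) {
    Map<Attribute<Object>, Object> attrs =
        AttributeInitializer.getAllAttributes(context, Context.OperatorContext.class);
    for (Map.Entry<Attribute<Object>, Object> e : attrs.entrySet()) {
      System.out.println(e.getKey() + " = " + e.getValue());
    }
  }
}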
  /**
   * Assigns partition keys to the new partitions and records which keys each
   * partition processes, so that on a rollback each partition clears only the
   * data it is responsible for.
   */
  @Override
  public Collection<com.datatorrent.api.Partitioner.Partition<UniqueValueCountAppender<V>>>
      definePartitions(
          Collection<com.datatorrent.api.Partitioner.Partition<UniqueValueCountAppender<V>>>
              partitions,
          PartitioningContext context) {
    final int finalCapacity =
        DefaultPartition.getRequiredPartitionCount(context, this.partitionCount);
    UniqueValueCountAppender<V> anOldOperator =
        partitions.iterator().next().getPartitionedInstance();
    partitions.clear();

    Collection<Partition<UniqueValueCountAppender<V>>> newPartitions =
        Lists.newArrayListWithCapacity(finalCapacity);

    for (int i = 0; i < finalCapacity; i++) {
      try {
        @SuppressWarnings("unchecked")
        UniqueValueCountAppender<V> statefulUniqueCount = this.getClass().newInstance();
        DefaultPartition<UniqueValueCountAppender<V>> partition =
            new DefaultPartition<UniqueValueCountAppender<V>>(statefulUniqueCount);
        newPartitions.add(partition);
      } catch (Throwable cause) {
        DTThrowable.rethrow(cause);
      }
    }

    DefaultPartition.assignPartitionKeys(Collections.unmodifiableCollection(newPartitions), input);
    int lPartitionMask = newPartitions.iterator().next().getPartitionKeys().get(input).mask;

    for (Partition<UniqueValueCountAppender<V>> statefulUniqueCountPartition : newPartitions) {
      UniqueValueCountAppender<V> statefulUniqueCountInstance =
          statefulUniqueCountPartition.getPartitionedInstance();

      statefulUniqueCountInstance.partitionKeys =
          statefulUniqueCountPartition.getPartitionKeys().get(input).partitions;
      statefulUniqueCountInstance.partitionMask = lPartitionMask;
      statefulUniqueCountInstance.store = anOldOperator.store;
      statefulUniqueCountInstance.tableName = anOldOperator.tableName;
      statefulUniqueCountInstance.cacheManager = anOldOperator.cacheManager;
    }
    return newPartitions;
  }
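For context, the mask and key set assigned above are what the engine uses to route tuples, and what each partition can reuse to decide which rows are its own during a rollback. The helper below is a simplified, hypothetical illustration of that membership test, not code from the operator.

import java.util.Set;

public final class PartitionRouting {
  // A tuple key belongs to a partition when (hashCode & mask) falls in that
  // partition's key set; the same predicate bounds what the partition may clear.
  public static boolean belongsTo(Object tupleKey, int partitionMask, Set<Integer> partitionKeys) {
    return partitionKeys.contains(tupleKey.hashCode() & partitionMask);
  }
}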
  /**
   * Scans the configured paths for jar and class files, adds their classes to the
   * type graph, attaches javadoc and resource information, and finally trims the
   * graph and resolves port type information.
   */
  public void buildTypeGraph() {
    Map<String, JarFile> openJarFiles = new HashMap<String, JarFile>();
    Map<String, File> openClassFiles = new HashMap<String, File>();
    // global cache of resource entries so a resource can be matched to its owning
    // class whether or not they sit in the same jar
    Set<String> resourceCacheSet = new HashSet<>();
    try {
      for (String path : pathsToScan) {
        File f = null;
        try {
          f = new File(path);
          if (!f.exists()
              || f.isDirectory()
              || (!f.getName().endsWith("jar") && !f.getName().endsWith("class"))) {
            continue;
          }
          if (GENERATED_CLASSES_JAR.equals(f.getName())) {
            continue;
          }
          if (f.getName().endsWith("class")) {
            typeGraph.addNode(f);
            openClassFiles.put(path, f);
          } else {
            JarFile jar = new JarFile(path);
            openJarFiles.put(path, jar);
            java.util.Enumeration<JarEntry> entriesEnum = jar.entries();
            while (entriesEnum.hasMoreElements()) {
              final java.util.jar.JarEntry jarEntry = entriesEnum.nextElement();
              String entryName = jarEntry.getName();
              if (jarEntry.isDirectory()) {
                continue;
              }
              if (entryName.endsWith("-javadoc.xml")) {
                try {
                  processJavadocXml(jar.getInputStream(jarEntry));
                  // break;
                } catch (Exception ex) {
                  LOG.warn("Cannot process javadoc {} : ", entryName, ex);
                }
              } else if (entryName.endsWith(".class")) {
                TypeGraph.TypeGraphVertex newNode = typeGraph.addNode(jarEntry, jar);
                // check if any visited resources belong to this type
                for (Iterator<String> iter = resourceCacheSet.iterator(); iter.hasNext(); ) {
                  String entry = iter.next();
                  if (entry.startsWith(entryName.substring(0, entryName.length() - 6))) {
                    newNode.setHasResource(true);
                    iter.remove();
                  }
                }
              } else {
                String className = entryName;
                boolean foundClass = false;
                // check if this resource belongs to any visited type
                while (className.contains("/")) {
                  className = className.substring(0, className.lastIndexOf('/'));
                  TypeGraph.TypeGraphVertex tgv = typeGraph.getNode(className.replace('/', '.'));
                  if (tgv != null) {
                    tgv.setHasResource(true);
                    foundClass = true;
                    break;
                  }
                }
                if (!foundClass) {
                  resourceCacheSet.add(entryName);
                }
              }
            }
          }
        } catch (IOException ex) {
          LOG.warn("Cannot process file {}", f, ex);
        }
      }

      typeGraph.trim();

      typeGraph.updatePortTypeInfoInTypeGraph(openJarFiles, openClassFiles);
    } finally {
      for (Entry<String, JarFile> entry : openJarFiles.entrySet()) {
        try {
          entry.getValue().close();
        } catch (IOException e) {
          DTThrowable.wrapIfChecked(e);
        }
      }
    }
  }
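The heart of the method is a plain java.util.jar walk. The standalone sketch below (illustrative class name) shows just that mechanic: open a jar, enumerate its entries, turn each .class entry into a fully qualified class name, and close the jar afterwards, much as the finally block above does for the jars it kept open.

import java.io.IOException;
import java.util.ArrayList;
import java.util.Enumeration;
import java.util.List;
import java.util.jar.JarEntry;
import java.util.jar.JarFile;

public class JarClassLister {
  public static List<String> listClassNames(String jarPath) throws IOException {
    List<String> names = new ArrayList<String>();
    try (JarFile jar = new JarFile(jarPath)) { // closed even if an entry fails to read
      Enumeration<JarEntry> entries = jar.entries();
      while (entries.hasMoreElements()) {
        JarEntry entry = entries.nextElement();
        if (!entry.isDirectory() && entry.getName().endsWith(".class")) {
          // "com/example/Foo.class" -> "com.example.Foo"
          String name = entry.getName();
          names.add(name.substring(0, name.length() - ".class".length()).replace('/', '.'));
        }
      }
    }
    return names;
  }
}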