public synchronized byte[] getExtendedRAM(int size) { byte[] rv = null; if (size > 0) { if (this.ramExtended != null) { if (size > this.ramExtended.length) { rv = new byte[size]; Arrays.fill(rv, (byte) 0); System.arraycopy(this.ramExtended, 0, rv, 0, this.ramExtended.length); this.ramExtended = rv; } else { rv = this.ramExtended; } } else { rv = new byte[size]; Arrays.fill(rv, (byte) 0); this.ramExtended = rv; } } return rv; }
int Query() { int minimum = 100001; visited = new boolean[V]; Arrays.fill(visited, false); depth = new int[V]; Arrays.fill(depth, -1); low = new int[V]; Arrays.fill(low, -1); parent = new int[V]; Arrays.fill(parent, -1); articulationPoints = new TreeMap<Integer, Boolean>(); getArticulationPoints(0, 0); for (Map.Entry<Integer, Boolean> entry : articulationPoints.entrySet()) { int i = (int) entry.getKey(); if (RatingScore[i] < minimum) { minimum = RatingScore[i]; } } return minimum != 100001 ? minimum : -1; }
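// A minimal sketch, not the original code, of the getArticulationPoints(u, d) DFS that Query()
// relies on: a standard low-link articulation-point search. It assumes an adjacency-list field
// `List<Integer>[] adj` (hypothetical name); the remaining fields (visited, depth, low, parent,
// articulationPoints) are the ones already used in Query().
void getArticulationPoints(int u, int d) {
  visited[u] = true;
  depth[u] = d;
  low[u] = d;
  int childCount = 0;
  boolean isCutVertex = false;
  for (int v : adj[u]) {
    if (!visited[v]) {
      parent[v] = u;
      getArticulationPoints(v, d + 1);
      childCount++;
      if (low[v] >= depth[u]) {
        isCutVertex = true; // no back edge from v's subtree reaches above u
      }
      low[u] = Math.min(low[u], low[v]);
    } else if (v != parent[u]) {
      low[u] = Math.min(low[u], depth[v]); // back edge
    }
  }
  // the root is an articulation point only if it has more than one DFS child
  if ((parent[u] != -1 && isCutVertex) || (parent[u] == -1 && childCount > 1)) {
    articulationPoints.put(u, true);
  }
}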
/** * Gets a list of all the devices found on the bus matching the specified set of product IDs. Any * device with a product ID equal to one of the products listed in <i>productIDs[]</i> will be * returned. You can search for devices by product name using {@link #productNameToID( String[] * productName ) productNameToID()}, like so: * * <pre>USBDevice[] devices = deviceManager.getDeviceByProductID( * deviceManager.productNameToID( new String[] { "USB-AO16-16A", "USB-AO16-16" } ) );</pre> * * @param productIDs an array containing one or more product IDs to search for. * @return An array of all the devices found. If no devices were found matching the specified set * of product IDs, the array will be empty (i.e. contain zero items). * @throws IllegalArgumentException if <i>productIDs</i> is null, empty, or contains an ID outside * the range MIN_PRODUCT_ID to MAX_PRODUCT_ID. */ public USBDevice[] getDeviceByProductID(int[] productIDs) { if (productIDs == null || productIDs.length < 1) throw new IllegalArgumentException("Invalid product ID array"); for (int index = 0; index < productIDs.length; index++) { if (productIDs[index] < MIN_PRODUCT_ID || productIDs[index] > MAX_PRODUCT_ID) throw new IllegalArgumentException("Invalid product ID: " + productIDs[index]); } // for( int index ... int[] sortedProductIDs = productIDs.clone(); Arrays.sort(sortedProductIDs); Vector<USBDevice> devices = new Vector<USBDevice>(); for (int index = 0; index < deviceList.size(); index++) { final int productID = deviceList.get(index).getProductID(); if (Arrays.binarySearch(sortedProductIDs, productID) >= 0) devices.add(deviceList.get(index)); } // for( int index ... return devices.toArray(new USBDevice[0]); } // getDeviceByProductID()
public static void main(String[] args) throws Exception { /* BufferedReader br=new BufferedReader(new FileReader("input.txt")); BufferedWriter out=new BufferedWriter(new FileWriter("output.txt")); */ BufferedReader br = new BufferedReader(new InputStreamReader(System.in), 2000); BufferedWriter out = new BufferedWriter(new OutputStreamWriter(System.out), 2000); String[] s = br.readLine().split(" "); int n = Integer.parseInt(s[0]); int q = Integer.parseInt(s[1]); int num[] = new int[n + 1]; int[] m = new int[4 * n]; // 4*n is a safe size for a segment tree over n leaves Arrays.fill(num, -1); s = br.readLine().split(" "); for (int i = 1; i <= n; i++) num[i] = Integer.parseInt(s[i - 1]); // build the segment tree maketree(1, 1, n, m, num); for (int qq = 1; qq <= q; qq++) { s = br.readLine().split(" "); int i = Integer.parseInt(s[0]); int j = Integer.parseInt(s[1]); int ans = query(1, 1, n, m, num, i, j); out.write("" + num[ans] + "\n"); out.flush(); } }
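// Hedged sketch of the maketree/query helpers the main method above assumes (they are not shown
// in this snippet): a range-minimum segment tree in which m[node] holds the index into num of the
// minimum value on that node's segment, so query(...) returns an index and the caller prints
// num[ans].
static void maketree(int node, int lo, int hi, int[] m, int[] num) {
  if (lo == hi) {
    m[node] = lo;
    return;
  }
  int mid = (lo + hi) / 2;
  maketree(2 * node, lo, mid, m, num);
  maketree(2 * node + 1, mid + 1, hi, m, num);
  m[node] = num[m[2 * node]] <= num[m[2 * node + 1]] ? m[2 * node] : m[2 * node + 1];
}

static int query(int node, int lo, int hi, int[] m, int[] num, int i, int j) {
  if (j < lo || hi < i) {
    return -1; // segment completely outside the query range
  }
  if (i <= lo && hi <= j) {
    return m[node];
  }
  int mid = (lo + hi) / 2;
  int left = query(2 * node, lo, mid, m, num, i, j);
  int right = query(2 * node + 1, mid + 1, hi, m, num, i, j);
  if (left == -1) return right;
  if (right == -1) return left;
  return num[left] <= num[right] ? left : right;
}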
/** * Calculates word order similarity between the sentences, with weighted words. * * @param s1 similarity values and closest-word indices for sentence 1 * @param s2 similarity values and closest-word indices for sentence 2 * @param weights1 word weights of sentence 1 * @param weights2 word weights of sentence 2 (currently unused) * @param sent2 sentence 2 as a space-separated string * @param unique all unique words of both sentences, space-separated * @return Word order similarity value */ public double orderSimilarity( List<double[]> s1, List<double[]> s2, List<double[]> weights1, List<double[]> weights2, String sent2, String unique) { double[] s1Dist = s1.get(0); double[] s1Friend = s1.get(1); double[] s2Dist = s2.get(0); double[] s2Friend = s2.get(1); double[] r1 = new double[s1Dist.length]; double[] r2 = new double[s2Dist.length]; String[] sent = sent2.split(" "); String[] un = unique.split(" "); String word; // Builds the word order vector for each sentence. // Threshold specifies that words can be seen as the same if similar enough // If same word not found in unique sentence, the order value is 0 for (int i = 0; i < r1.length; i++) { if (s1Dist[i] == 1.0) { r1[i] = i + 1; } else if (s1Dist[i] >= threshold) { r1[i] = s1Friend[i] + 1; } else { r1[i] = 0; } } for (int i = 0; i < r2.length; i++) { if (s2Dist[i] == 1.0) { word = un[i]; r2[i] = Arrays.asList(sent).indexOf(word) + 1; } else if (s2Dist[i] >= threshold) { r2[i] = s2Friend[i] + 1; } else { r2[i] = 0.0; } } double numerator = 0.0; double denominator = 0.0; // Calculate order similarity while avoiding division by 0 for (int i = 0; i < r1.length; i++) { numerator = numerator + Math.pow((r1[i] - r2[i]) * weights1.get(0)[i], 2); denominator = denominator + Math.pow((r1[i] + r2[i]) * weights1.get(0)[i], 2); } numerator = Math.sqrt(numerator); denominator = Math.sqrt(denominator); if (denominator == 0.0) { numerator = 1; denominator = 1; } return numerator / denominator; }
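// For reference, a small unweighted sketch of the ratio computed at the end of orderSimilarity():
// ||r1 - r2|| / ||r1 + r2|| over the two word order vectors, with the same "return 1 when the
// denominator is 0" convention as above. This is an illustration, not part of the original class.
static double orderRatio(double[] r1, double[] r2) {
  double num = 0.0;
  double den = 0.0;
  for (int i = 0; i < r1.length; i++) {
    num += Math.pow(r1[i] - r2[i], 2);
    den += Math.pow(r1[i] + r2[i], 2);
  }
  if (den == 0.0) {
    return 1.0; // both vectors are all zeros
  }
  return Math.sqrt(num) / Math.sqrt(den);
}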
public static void main(String args[]) { // Step-1: Set up inputs List<List<Integer>> input = new ArrayList<List<Integer>>(); List<Integer> l1 = Arrays.asList(new Integer[] {4, 5}); List<Integer> l2 = Arrays.asList(new Integer[] {1, 2, 3}); List<Integer> l3 = Arrays.asList(new Integer[] {8, 9, 10}); List<Integer> l4 = Arrays.asList(new Integer[] {6, 7}); List<Integer> l5 = Arrays.asList(new Integer[] {11, 12}); input.add(l1); input.add(l2); input.add(l3); input.add(l4); input.add(l5); System.out.println(input); // Step-2: Core logic List<Integer> output = mergeAllLists(input, 0); System.out.println(output); }
public synchronized void reset() { if (this.debugLevel > 0) { System.out.println("GIDE: reset"); } this.pendingCmd = Command.NONE; this.resetFlag = false; this.ioTaskNoWait = false; this.ioTaskThread.interrupt(); if (this.disks != null) { boolean sizeOK = false; if ((this.cylinders != null) && (this.heads != null) && (this.sectorsPerTrack != null) && (this.totalSectors != null)) { if ((this.cylinders.length >= this.disks.length) && (this.heads.length >= this.disks.length) && (this.sectorsPerTrack.length >= this.disks.length) && (this.totalSectors.length >= this.disks.length)) { sizeOK = true; } } if (!sizeOK) { this.cylinders = new int[this.disks.length]; this.heads = new int[this.disks.length]; this.sectorsPerTrack = new int[this.disks.length]; this.totalSectors = new long[this.disks.length]; } int maxSectsPerTrack = 0; for (int i = 0; i < this.disks.length; i++) { this.cylinders[i] = disks[i].getCylinders(); this.heads[i] = disks[i].getHeads(); this.sectorsPerTrack[i] = disks[i].getSectorsPerTrack(); this.totalSectors[i] = (long) this.cylinders[i] * (long) this.heads[i] * (long) this.sectorsPerTrack[i]; if (this.sectorsPerTrack[i] > maxSectsPerTrack) { maxSectsPerTrack = this.sectorsPerTrack[i]; } } int bufSize = Math.max(maxSectsPerTrack, 1) * SECTOR_SIZE; if (this.ioBuf != null) { if (this.ioBuf.length < bufSize) { this.ioBuf = null; } } if (this.ioBuf == null) { this.ioBuf = new byte[bufSize]; } } if (this.ioBuf != null) { Arrays.fill(this.ioBuf, (byte) 0); } softReset(); }
public Main() { try { BufferedReader in; in = new BufferedReader(new InputStreamReader(System.in)); // Used for CCC int numLights = Integer.parseInt(in.readLine()); int[] states = new int[numLights]; for (int i = 0; i < numLights; i++) { states[i] = Integer.parseInt(in.readLine()); } ArrayDeque<Scenario> Q = new ArrayDeque<Scenario>(); HashMap<String, Integer> dp = new HashMap<String, Integer>(); int moves = 0; Q.addLast(new Scenario(states)); while (!Q.isEmpty()) { int size = Q.size(); for (int q = 0; q < size; q++) { Scenario temp = Q.removeFirst(); if (isEmpty(temp.states)) { System.out.println(moves); return; } else { for (int i = 0; i < temp.states.length; i++) { if (temp.states[i] == 0) { int[] newArr = Arrays.copyOf(temp.states, temp.states.length); newArr[i] = 1; newArr = fixArray(newArr); String arr = ""; for (int p = 0; p < newArr.length; p++) arr += newArr[p]; if (dp.get(arr) == null) { dp.put(arr, moves); Q.addLast(new Scenario(newArr)); } else { int val = dp.get(arr); if (val != 0 && moves < val) { dp.put(arr, moves); Q.addLast(new Scenario(newArr)); } } // outputArr(newArr); } } } } moves++; } } catch (IOException e) { System.out.println("IO: General"); } }
public static void sort(int[] arr, int from, int to) { if (from == to) return; int[] temp_arr = new int[to - from + 1]; for (int i = from; i < to + 1; i++) { temp_arr[i - from] = arr[i]; } Arrays.sort(temp_arr); for (int i = from; i < to + 1; i++) { arr[i] = temp_arr[i - from]; } }
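// Usage sketch for the range sort above: sorts arr[2..5] inclusive and leaves the rest of the
// array untouched.
public static void main(String[] args) {
  int[] arr = {9, 8, 7, 6, 5, 4, 3, 2};
  sort(arr, 2, 5); // arr becomes {9, 8, 4, 5, 6, 7, 3, 2}
  System.out.println(Arrays.toString(arr));
}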
// TLE public static String getPermutation(int n, int k) throws Exception { int N = n; // your code goes here int[] arr = new int[N]; for (int i = 0; i < N; i++) { arr[i] = i + 1; } int M = k; // (less than N!) int[] res = printMthPermutation(arr, M); return Arrays.toString(res); }
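// printMthPermutation(arr, M) is not shown above; this is a hypothetical sketch of it using the
// factorial number system (pick the (rank / (remaining-1)!)-th remaining element at each step).
// It assumes k is 1-based and that the k-th permutation in lexicographic order is wanted; a
// direct computation like this also avoids the TLE noted above, so it is not the original
// (slower) helper.
static int[] printMthPermutation(int[] arr, int k) {
  List<Integer> pool = new ArrayList<Integer>();
  for (int v : arr) {
    pool.add(v);
  }
  int n = pool.size();
  long[] fact = new long[n + 1];
  fact[0] = 1;
  for (int i = 1; i <= n; i++) {
    fact[i] = fact[i - 1] * i;
  }
  int[] res = new int[n];
  long rank = k - 1; // switch to a 0-based rank
  for (int i = 0; i < n; i++) {
    int idx = (int) (rank / fact[n - 1 - i]);
    rank %= fact[n - 1 - i];
    res[i] = pool.remove(idx);
  }
  return res;
}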
/** * Finds the weights of a word, both the weight of the word itself and the weight of its * closest friend among the unique words. Note that if the word in the sentence exists in the * unique words, these two weights are the same. The weights are inversely proportional to the * frequency of the word; word frequencies are found in wordFreqs. * * @param wordFreqs list of word frequencies * @param sent sentence * @param unique all unique words in both sentences to be compared * @param sim values of distances, and closest words to unique words for the sentence * @param sentJunk sentence with nonsense words included * @return Word weights for all words in sentence/unique sentence */ public List<double[]> WordWeights( List<WordFreq> wordFreqs, String sent, String unique, List<double[]> sim, String sentJunk) { String[] sentWordsJunk = sentJunk.split(" "); String[] sentWords = sent.split(" "); String[] uniqueWords = unique.split(" "); String friendWord = null; double[] weightsSent = new double[uniqueWords.length]; // Weights of closest words in sent to words in uniqueWords double[] weightsUnique = new double[uniqueWords.length]; // Weights of words in uniqueWords for (int j = 0; j < wordFreqs.size(); j++) { /* For each word in the list of word frequencies, check whether it matches the current unique word, then use its frequency value */ for (int i = 0; i < uniqueWords.length; i++) { if ((wordFreqs.get(j).getWord()).equals(uniqueWords[i])) { weightsUnique[i] = 1 / wordFreqs.get(j).getFreq(); } } } for (int i = 0; i < uniqueWords.length; i++) { int index = Arrays.asList(sentWords).indexOf(uniqueWords[i]); if (index >= 0) { weightsSent[i] = weightsUnique[i]; } else { // if(sim.get(0)[i]>=threshold){ friendWord = sentWordsJunk[(int) sim.get(1)[i]]; index = Arrays.asList(uniqueWords).indexOf(friendWord); weightsSent[i] = weightsUnique[index]; // gets friend in sent } } List<double[]> results = new ArrayList<double[]>(); results.add(weightsUnique); results.add(weightsSent); return results; }
private void fireFetchSectors() { if ((this.curDisk != null) && (this.curDiskIdx >= 0)) { long pos = calcFilePos(); if (pos < 0) { this.errorReg = ERROR_CMD_ABORTED; this.statusReg |= STATUS_ERROR; } else { Arrays.fill(this.ioBuf, (byte) 0xE5); File file = this.curDisk.getFile(); if (file != null) { int nSec = Math.min(this.sectorsPerTrack[this.curDiskIdx] - this.sectorNum + 1, this.sectorCnt); startIOTask(file, pos, nSec * SECTOR_SIZE); } } } }
@RequestMapping(value = VIDEO_SEARCH_PATH, method = RequestMethod.GET) public @ResponseBody String[] searchVideo( @RequestParam(value = "username") String uName, @RequestParam(value = "video") String videoHash, HttpServletResponse response) { System.out.println("Search from:" + uName); if (!user_vidNameMap.containsKey(uName)) { response.setStatus(402); // client not connected return null; } Set<String> users = vidName_UserMap.get(videoHash); if (users == null) { System.out.println("Searching main server\n"); try { users = masterService.psSearch(hostAdder, videoHash); } catch (Exception e) { System.err.println(e.getMessage()); return null; } if (users == null) return null; if (vidName_UserMap.containsKey(videoHash)) { vidName_UserMap.get(videoHash).addAll(users); } else { Set<String> s = new HashSet<String>(); s.addAll(users); vidName_UserMap.put(videoHash, s); } } else { Iterator<String> it = users.iterator(); while (it.hasNext()) { String temp = it.next(); if (!activeUsers.contains(temp)) { it.remove(); } } } System.out.println("Search result : " + Arrays.asList(users.toArray(new String[0]))); // String [] a = new String[] return users.toArray(new String[0]); }
public static void main(String[] args) throws IOException { BufferedReader reader = new BufferedReader(new InputStreamReader(System.in)); String line = null; String[] splitLine = null; while ((line = reader.readLine()) != null) { if (line.isEmpty()) { System.out.println(); continue; } int N = Integer.valueOf(line); int[] array = new int[N]; splitLine = reader.readLine().split("\\s+"); for (int i = 0; i < N; i++) { array[i] = Integer.valueOf(splitLine[i]); } int M = Integer.valueOf(reader.readLine()); Arrays.sort(array); int minDifference = Integer.MAX_VALUE; int i = 0, j = 0; for (int a = 0; a < N; a++) { int first = array[a]; int b = binary_search(array, M - first, 0, N - 1, a); if (b >= 0) { int second = array[b]; if ((first + second) == M) { if (minDifference > Math.abs(first - second)) { j = second; i = first; minDifference = Math.abs(first - second); } } } } System.out.println("Peter should buy books whose prices are " + i + " and " + j + "."); } }
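// The binary_search helper used above is not shown; this is a hypothetical sketch based on how it
// is called: find an index b != skip in the sorted array with array[b] == target, returning a
// negative value when no such element exists (duplicates are adjacent in a sorted array, so only
// the neighbours of the excluded position need to be checked).
static int binary_search(int[] array, int target, int lo, int hi, int skip) {
  while (lo <= hi) {
    int mid = (lo + hi) >>> 1;
    if (array[mid] == target) {
      if (mid != skip) {
        return mid;
      }
      if (mid + 1 <= hi && array[mid + 1] == target) {
        return mid + 1;
      }
      if (mid - 1 >= lo && array[mid - 1] == target) {
        return mid - 1;
      }
      return -1;
    } else if (array[mid] < target) {
      lo = mid + 1;
    } else {
      hi = mid - 1;
    }
  }
  return -1;
}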
public EmuThread(ScreenFrm screenFrm, Properties props) { super("JKCEMU CPU"); this.screenFrm = screenFrm; this.z80cpu = new Z80CPU(this, this); this.monitor = "a monitor object for synchronization"; this.joyFrm = null; this.joyThreads = new JoystickThread[2]; this.ram = new byte[0x10000]; this.ramExtended = null; this.ramFloppy1 = new RAMFloppy(); this.ramFloppy2 = new RAMFloppy(); this.printMngr = new PrintMngr(); this.audioIn = null; this.audioOut = null; this.loadData = null; this.resetLevel = ResetLevel.POWER_ON; this.emuRunning = false; this.emuSys = null; Arrays.fill(this.joyThreads, null); applySettings(props); }
private void execCmdIdentifyDrive() { if (this.curDisk != null) { Arrays.fill(this.ioBuf, 0, SECTOR_SIZE, (byte) 0); setIOBufWord(0, 0x015A); setIOBufWord(2, this.curDisk.getCylinders()); setIOBufWord(6, this.curDisk.getHeads()); setIOBufWord(8, this.curDisk.getSectorsPerTrack() * SECTOR_SIZE); setIOBufWord(10, SECTOR_SIZE); setIOBufWord(12, this.curDisk.getSectorsPerTrack()); setIOBufASCII(20, Main.VERSION, 20); setIOBufWord(42, 1); // 1 sector buffer setIOBufASCII(46, "JKCEMU", 8); String model = this.curDisk.getDiskModel(); if (model != null) { if (model.isEmpty()) { model = null; } } if (model == null) { model = String.format( "Sonstige (%dx%dx%d)", this.curDisk.getCylinders(), this.curDisk.getHeads(), this.curDisk.getSectorsPerTrack()); } setIOBufASCII(54, model, 40); File file = this.curDisk.getFile(); if (file != null) { if (!file.canWrite()) { setIOBufWord(98, 1); // write-protected } } this.ioBufPos = 0; this.pendingCmd = Command.IDENTIFY_DISK; this.statusReg |= STATUS_DATA_REQUEST; fireInterrupt(); } }
// sieve of Eratosthenes public static int[] primes(int n) throws Exception { // for(int i=1;i<=arr.length-1;i++)out.write(""+arr[i]+" "); boolean arr[] = new boolean[n + 1]; Arrays.fill(arr, true); if (n >= 1) arr[1] = false; for (int i = 2; i <= Math.sqrt(n); i++) { if (!arr[i]) continue; for (int j = 2 * i; j <= n; j += i) { arr[j] = false; } } LinkedList<Integer> ll = new LinkedList<Integer>(); for (int i = 1; i <= n; i++) { if (arr[i]) ll.add(i); } n = ll.size(); int primes[] = new int[n + 1]; for (int i = 1; i <= n; i++) { primes[i] = ll.removeFirst(); } return primes; }
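// Usage sketch: the returned array is 1-based (primes[1..count]), with primes[0] unused.
public static void main(String[] args) throws Exception {
  int[] p = primes(30);
  for (int i = 1; i < p.length; i++) {
    System.out.print(p[i] + " "); // 2 3 5 7 11 13 17 19 23 29
  }
  System.out.println();
}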
public Scenario(int[] states) { this.states = Arrays.copyOf(states, states.length); }
private void writeFormatByte(int value) { if ((this.curDisk != null) && (this.curDiskIdx >= 0)) { if (this.ioBufPos < SECTOR_SIZE) { this.ioBuf[this.ioBufPos++] = (byte) value; if (this.ioBufPos == this.ioBuf.length) { boolean cmdFinished = true; /* * Since the file format used does not store the sector numbers, * they must start at one and be consecutive. * The following code checks whether this condition is met. */ int spt = this.sectorsPerTrack[this.curDiskIdx]; boolean[] sectors = new boolean[spt]; Arrays.fill(sectors, false); int ptr = 0; boolean err = false; for (int i = 0; !err && (i < sectors.length); i++) { if ((ptr + 1) < SECTOR_SIZE) { // the 1st byte of each entry must be 00 (good sector) if (this.ioBuf[ptr++] != 0) { err = true; } // the 2nd byte of each entry gives the sector number int v = (int) this.ioBuf[ptr++] & 0xFF; if (v < sectors.length) { if (sectors[v]) { err = true; // sector number specified twice } else { sectors[v] = true; } } else { err = true; // sector number out of range } } else { err = true; // should never happen } } if (!err) { for (int i = 0; i < sectors.length; i++) { if (!sectors[i]) { err = true; // sector number missing break; } } } if (err) { this.errorReg = ERROR_UNCORRECTABLE_DATA; this.statusReg |= STATUS_ERROR; } else { // clear the sectors int heads = this.heads[this.curDiskIdx]; long headNum = this.sdhReg & 0x0F; if ((this.cylNum >= 0) && (this.cylNum < this.cylinders[this.curDiskIdx]) && (headNum >= 0) && (headNum < heads)) { long sectOffs = (this.cylNum * heads * spt) + (headNum * spt); startIOTask( this.curDisk.getFile(), 0x0100 + (sectOffs * ((long) SECTOR_SIZE)), spt * SECTOR_SIZE); cmdFinished = false; } } if (cmdFinished) { fireInterrupt(); } } } } }
public void debug(Object... o) { System.err.println(Arrays.deepToString(o)); }
/** sorts the user list and rebuilds the user list from the sorted user vector, */ public void updateList() { Object[] tmp = users.toArray(); Arrays.sort(tmp); userList.setListData(tmp); }
public Main() { try { in = new BufferedReader(new InputStreamReader(System.in)); // minimum distance from D to K int numCities = nextInt(); int tradeRoutes = nextInt(); int[][] adjacencyMatrix = new int[numCities][numCities]; int[] minDistance = new int[numCities]; Arrays.fill(minDistance, 100000000); // Arrays.fill(adjacencyMatrix, -1); // int [] pencilCosts = new int[ Node[] cities = new Node[numCities]; for (int x = 0; x < tradeRoutes; x++) { int cityA = nextInt() - 1; int cityB = nextInt() - 1; int cost = nextInt(); if (cities[cityA] == null) cities[cityA] = new Node(cityA); if (cities[cityB] == null) cities[cityB] = new Node(cityB); adjacencyMatrix[cityA][cityB] = cost; adjacencyMatrix[cityB][cityA] = cost; // cities[cityA].routes.add(new Edge(cost, cities[cityB])); // cities[cityB].routes.add(new Edge(cost, cities[cityA])); } int numStores = nextInt(); int[] pencilCosts = new int[numCities]; Arrays.fill(pencilCosts, -1); for (int x = 0; x < numStores; x++) { int ID = nextInt() - 1; int cost = nextInt(); pencilCosts[ID] = cost; } int destination = nextInt() - 1; // if (isGood[destination]){ // } int minCost = 100000000; Queue<Node> Q = new LinkedList<Node>(); // PriorityQueue<Node> Q = new PriorityQueue<Node>(); minDistance[destination] = 0; // cities[destination].distance = 0; Q.offer(cities[destination]); while (!Q.isEmpty()) { Node temp = Q.poll(); for (int x = 0; x < numCities; x++) { if (adjacencyMatrix[temp.ID][x] != 0 && (minDistance[x] == 100000000 || minDistance[x] > minDistance[temp.ID] + adjacencyMatrix[temp.ID][x])) { minDistance[x] = minDistance[temp.ID] + adjacencyMatrix[temp.ID][x]; if (pencilCosts[x] != -1 && minDistance[x] < minCost) { // System.out.println(minCost); minCost = Math.min(minDistance[x] + pencilCosts[x], minCost); Q.offer(cities[x]); } else { if (pencilCosts[x] == -1) { // why> Q.offer(cities[x]); } } // Q.offer(temp.routes.get(x).destination); } } } for (int x = 0; x < numCities; x++) { if (pencilCosts[x] != -1 && pencilCosts[x] + minDistance[x] < minCost && minDistance[x] != 100000000) { minCost = minDistance[x] + pencilCosts[x]; } } System.out.println(minCost); } catch (IOException e) { System.out.println("IO: General"); } }
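// The nextInt() helper (and the shared BufferedReader `in`) are declared elsewhere in this class;
// a plausible minimal sketch, assuming whitespace-separated integers, is a StringTokenizer over
// the reader:
private StringTokenizer st;

private int nextInt() throws IOException {
  while (st == null || !st.hasMoreTokens()) {
    st = new StringTokenizer(in.readLine());
  }
  return Integer.parseInt(st.nextToken());
}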
public static void main(String args[]) throws InterruptedException, IOException { int i, j; String serverInetAddress = "localhost"; String server1AddressString = "10.10.1.1"; InetAddress server1Address = InetAddress.getByName(server1AddressString); String server2AddressString = "10.10.2.2"; InetAddress server2Address = InetAddress.getByName(server2AddressString); String server3AddressString = "10.10.3.2"; InetAddress server3Address = InetAddress.getByName(server3AddressString); String server4AddressString = "localhost"; InetAddress server4Address = InetAddress.getByName(server4AddressString); DatagramSocket skt; { skt = new DatagramSocket(PORT_NUMBER_CLIENT); // socket used to listen and write InetAddress host = InetAddress.getByName(serverInetAddress); int serversocket = S1.PORT_NUMBER_SERVER; String msg = "Send file size"; byte[] b = msg.getBytes(); // dummy assignments - not used anywhere int filesize = 1; DatagramPacket reply, request; reply = new DatagramPacket(b, b.length, host, serversocket); request = new DatagramPacket(b, b.length, host, serversocket); for (i = 1; i <= 3; i++) { // defining a packet called request with parameters b(msg in bytes), b.length, host Internet // address and socket number if (i == 1) { host = server1Address; } else if (i == 2) { host = server2Address; } else if (i == 3) { host = server3Address; } request = new DatagramPacket(b, b.length, host, serversocket); // System.out.println("request sent from client to server"); Thread.sleep(S1.PAUSE_DURATION); // for error checks // Sending the packet- for getting the file size skt.send(request); // getting reply from // server........................................................................................ byte[] buffer = new byte [S1.PACKET_SIZE]; // apparently the size of data packet at the receiving side needs // to be bigger than the size of incoming datapacket reply = new DatagramPacket(buffer, buffer.length); // receiving packet from server - contatining filesize skt.receive(reply); // System.out.println("Response Received from server"); // System.out.println("on Client: - filesize= "+new String(reply.getData())); filesize = Integer.parseInt(new String(reply.getData()).trim()); // System.out.println("on Client: - filesize= "+filesize); Thread.sleep(S1.PAUSE_DURATION); } // here the client know the size of the file // Find the number of times it must make iterations - dividing filesize by packet_size // Request that many packets from server String[] buffer_string = new String[BUFFER_SIZE_CLIENT]; float delay[] = new float[filesize / S1.PACKET_SIZE]; System.out.println(filesize); System.out.println(S1.PACKET_SIZE); System.out.println(filesize / S1.PACKET_SIZE); Thread.sleep(2000); byte[] buffer = new byte[S1.PACKET_SIZE]; for (i = 0; i < filesize / S1.PACKET_SIZE; i++) { if (i % 100 != 0) { // System.out.print(" "+i); } else { System.out.println(" " + i); } msg = String.valueOf(i); b = msg.getBytes(); if (i % 3 == 0) { host = server1Address; } else if (i % 3 == 1) { host = server2Address; } else if (i % 3 == 2) { host = server3Address; } request = new DatagramPacket(b, b.length, host, serversocket); skt.send(request); delay[i] = System.nanoTime(); Thread.sleep(10); skt.receive(reply); delay[i] = System.nanoTime() - delay[i]; delay[i] = delay[i] / (1000000); /* if(empty_index<BUFFER_SIZE_CLIENT) { buffer_string[empty_index]=new String(reply.getData()); empty_index++; } else { for(j=0;j<BUFFER_SIZE_CLIENT-1;j++) { buffer_string[j]=buffer_string[j+1]; } buffer_string[BUFFER_SIZE_CLIENT-1]=new 
String(reply.getData()); }*/ // display_buffer(buffer_string); } Arrays.sort(delay); float delay2[] = new float[filesize / S1.PACKET_SIZE]; for (i = 0; i < delay2.length; i++) { delay2[i] = delay[delay.length - i - 1]; } // delay2 stores the array in descending values float[] Sk = new float[filesize / S1.PACKET_SIZE]; Sk[0] = (float) 0.0; for (i = 1; i < filesize / S1.PACKET_SIZE; i++) { for (j = 1; j <= i; j++) { Sk[i] = Sk[i] + delay2[j]; } Sk[i] = Sk[i] / (10 * i); } make_output(Sk); System.out.format( "Sk at 2=%f\n,10=%f\n,20=%f\n,100=%f\n and 30000=%f\n ", Sk[1], Sk[9], Sk[19], Sk[99], Sk[29999]); // display_buffer(buffer_string); skt.close(); } }
@Override public void run() { this.emuRunning = true; while (this.emuRunning) { try { /* * Check whether a program is to be loaded or whether the emulator * is actually to be reset */ LoadData loadData = null; synchronized (this.monitor) { loadData = this.loadData; if (loadData != null) { this.loadData = null; } else { if (this.resetLevel == ResetLevel.POWER_ON) { Arrays.fill(this.ram, (byte) 0); } } } if (loadData != null) { loadData.loadIntoMemory(this); this.z80cpu.setRegPC(loadData.getStartAddr()); if (this.emuSys != null) { int spInitValue = this.emuSys.getAppStartStackInitValue(); if (spInitValue > 0) { this.z80cpu.setRegSP(spInitValue); } } } else { if ((this.resetLevel == ResetLevel.COLD_RESET) || (this.resetLevel == ResetLevel.POWER_ON)) { this.z80cpu.resetCPU(true); } else { this.z80cpu.resetCPU(false); } if (this.emuSys != null) { this.emuSys.reset(this.resetLevel, Main.getProperties()); this.z80cpu.setRegPC(this.emuSys.getResetStartAddress(this.resetLevel)); } } // reset RAM floppies and print manager this.printMngr.reset(); this.ramFloppy1.reset(); this.ramFloppy2.reset(); if ((this.emuSys != null) && (this.resetLevel == ResetLevel.POWER_ON) && Main.getBooleanProperty("jkcemu.ramfloppy.clear_on_power_on", false)) { if (this.emuSys.supportsRAMFloppy1() && (this.ramFloppy1.getUsedSize() > 0)) { this.ramFloppy1.clear(); } if (this.emuSys.supportsRAMFloppy2() && (this.ramFloppy2.getUsedSize() > 0)) { this.ramFloppy2.clear(); } } // notify the windows final Frame[] frms = Frame.getFrames(); if (frms != null) { EventQueue.invokeLater( new Runnable() { @Override public void run() { for (Frame f : frms) { if (f instanceof BasicFrm) { ((BasicFrm) f).resetFired(); } } } }); } // branch into the Z80 emulation this.resetLevel = ResetLevel.NO_RESET; this.z80cpu.run(); } catch (Z80ExternalException ex) { } catch (Exception ex) { this.emuRunning = false; EventQueue.invokeLater(new ErrorMsg(this.screenFrm, ex)); } } }
/* * We incorporate the ability to create an arbitrary network structure. * We use array of arrays of doubles for each inter-layer matrix * Thus, between each layer, we need a matrix of weights. * Num rows * num columns in matrix = nodes in layer below * nodes in layer above * * We use the Math library's pow function to raise to exponent: double pow(double base, double exponent) * * Hidden Nodes in current Layer (j) * previous layers nodes[ ] * Features [ Wij ] * (i) [ ] * * I set up a matrix with dimensions: [ nodes in previous layer ] [ nodes in next layer ] * * Since we are traveling through one layer at a time, we need to have another data structure * that will be outputs for this layer * * I use for loops to initialize array of arrays ( allocated necessary memory) * Please note that: number of layers + 1 = number of weight arrays needed */ public void train(Matrix features, Matrix labels) throws Exception { double[] recentAccuracies = new double[5]; int currentAccuracyIndex = 0; double currentAccuracy = 0; Random rand = new Random(); // SHUFFLE labels, features together features.shuffle(rand, labels); // need to map 0,1, or 2 to the three dimensional vectors, DO N-OF-K-ENCODING FOR THE // BACKPROPAGATION Matrix newNOfKLabelsMatrix = new Matrix(); newNOfKLabelsMatrix.setSize( labels.rows(), labels.valueCount(0)); // I HARD CODE IN THAT THERE SHOULD BE 3 OUTPUT NODES for (int row = 0; row < newNOfKLabelsMatrix.rows(); row++) { // for each instance for (int k = 0; k < labels.valueCount(0); k++) { if (labels.get(row, 0) == k) { for (int m = 0; m < labels.valueCount(0); m++) { newNOfKLabelsMatrix.set(row, m, 0); } newNOfKLabelsMatrix.set(row, k, 1); } } } labels = newNOfKLabelsMatrix; // IMMEDIATELY SAVE SOME OF THIS, NEVER WILL TRAIN ON THESE // STICK THESE INTO A VALIDATION SET // ONCE MSE STARTS TO INCREASE AGAIN ON THE VALIDATION SET, WE'VE GONE TOO FAR int numRowsToGetIntoTrainingSet = (int) (features.rows() * validationSetPercentageOfData); Matrix featuresForTrainingTrimmed = new Matrix(); featuresForTrainingTrimmed.setSize(numRowsToGetIntoTrainingSet, features.cols()); Matrix featuresValidationSet = new Matrix(); featuresValidationSet.setSize(features.rows() - numRowsToGetIntoTrainingSet, features.cols()); Matrix labelsForTrainingTrimmed = new Matrix(); labelsForTrainingTrimmed.setSize(numRowsToGetIntoTrainingSet, labels.cols()); Matrix labelsValidationSet = new Matrix(); labelsValidationSet.setSize(features.rows() - numRowsToGetIntoTrainingSet, labels.cols()); // LOOP THROUGH AND PUT MOST OF FEATURES INTO featuresForTrainingTrimmed for (int row = 0; row < features.rows(); row++) { for (int col = 0; col < features.cols(); col++) { if (row < numRowsToGetIntoTrainingSet) { featuresForTrainingTrimmed.set(row, col, features.get(row, col)); } else { featuresValidationSet.set(row - numRowsToGetIntoTrainingSet, col, features.get(row, col)); } } } // LOOP THROUGH AND PUT MOST OF FEATURES INTO featuresForTrainingTrimmed for (int row = 0; row < labels.rows(); row++) { for (int col = 0; col < labels.cols(); col++) { if (row < numRowsToGetIntoTrainingSet) { labelsForTrainingTrimmed.set(row, col, labels.get(row, col)); } else { labelsValidationSet.set(row - numRowsToGetIntoTrainingSet, col, labels.get(row, col)); } } } features = featuresForTrainingTrimmed; labels = labelsForTrainingTrimmed; // LOOP THROUGH AND PUT LEFTOVER PORTION OF FEATURES INTO validationSet arrayListOfEachLayersWeightMatrices = new ArrayList<double[][]>(); for (int i = 0; i < numHiddenLayers + 1; i++) { // each layer 
double[][] specificLayersWeightMatrix; if (i == 0) { // first hidden layer (Each layer owns its own weights) specificLayersWeightMatrix = new double[features.cols()][numNodesPerHiddenLayer[i]]; // INPUTS are the rows } else if (i == numHiddenLayers) { specificLayersWeightMatrix = new double[numNodesPerHiddenLayer[i - 1]][labels.cols()]; // OUTPUTS ARE THE COLUMNS } else { specificLayersWeightMatrix = new double[numNodesPerHiddenLayer[i - 1]][numNodesPerHiddenLayer[i]]; } arrayListOfEachLayersWeightMatrices.add(specificLayersWeightMatrix); } changeInWeightMatricesForEveryLayer = new ArrayList<double[][]>(); for (int i = 0; i < numHiddenLayers + 1; i++) { // each layer double[][] specificLayersWeightMatrix; if (i == 0) { // first hidden layer (Each layer owns its own weights) specificLayersWeightMatrix = new double[features.cols()][numNodesPerHiddenLayer[i]]; // INPUTS are the rows } else if (i == numHiddenLayers) { specificLayersWeightMatrix = new double[numNodesPerHiddenLayer[i - 1]][labels.cols()]; // OUTPUTS ARE THE COLUMNS } else { specificLayersWeightMatrix = new double[numNodesPerHiddenLayer[i - 1]][numNodesPerHiddenLayer[i]]; } changeInWeightMatricesForEveryLayer.add(specificLayersWeightMatrix); } // allocate space/ initialize the previous change in weights that we'll use for momentum temporaryStashChangeInWeightMatricesForEveryLayer = new ArrayList<double[][]>(); for (int i = 0; i < numHiddenLayers + 1; i++) { // each layer double[][] specificLayersWeightMatrix; if (i == 0) { // first hidden layer (Each layer owns its own weights) specificLayersWeightMatrix = new double[features.cols()][numNodesPerHiddenLayer[i]]; // INPUTS are the rows } else if (i == numHiddenLayers) { specificLayersWeightMatrix = new double[numNodesPerHiddenLayer[i - 1]][labels.cols()]; // OUTPUTS ARE THE COLUMNS } else { specificLayersWeightMatrix = new double[numNodesPerHiddenLayer[i - 1]][numNodesPerHiddenLayer[i]]; } temporaryStashChangeInWeightMatricesForEveryLayer.add(specificLayersWeightMatrix); } // ALLOCATE SPACE FOR DELTA ( INTERMEDIATE VALUES THAT WE USE TO UPDATE THE WEIGHTS) arrayListOfEachLayersDeltaArray = new ArrayList<double[]>(); // EACH LAYER HAS AN ARRAY OF DELTA VALUES for (int i = 0; i < numHiddenLayers + 2; i++) { // each layer // OF COURSE WE COULD HAVE DONE numHiddenLayers + 1, but I want // consistency with fnet ArrayList double[] specificLayersDeltaArray; if (i == 0) { // first hidden layer (Each layer owns its own weights) specificLayersDeltaArray = new double[features.cols()]; // INPUTS are the rows } else if (i == (numHiddenLayers + 1)) { // specificLayersDeltaArray = new double[ numNodesPerHiddenLayer[ i-1 ] ] ; //[ // numNodesPerHiddenLayer[ labels.cols() ] ] ; // OUTPUTS ARE THE COLUMNS specificLayersDeltaArray = new double[labels.cols()]; // FIND OUT # NODES AT EACH LEVEL } else { specificLayersDeltaArray = new double[numNodesPerHiddenLayer[i - 1]]; } arrayListOfEachLayersDeltaArray.add(specificLayersDeltaArray); } previousChangeInWeightMatricesForEachLayer = new ArrayList<double[][]>(); for (int i = 0; i < numHiddenLayers + 1; i++) { // each layer double[][] specificLayersWeightMatrix; if (i == 0) { // first hidden layer (Each layer owns its own weights) specificLayersWeightMatrix = new double[features.cols()][numNodesPerHiddenLayer[i]]; // INPUTS are the rows } else if (i == numHiddenLayers) { specificLayersWeightMatrix = new double[numNodesPerHiddenLayer[i - 1]][labels.cols()]; // OUTPUTS ARE THE COLUMNS } else { specificLayersWeightMatrix = new double[numNodesPerHiddenLayer[i 
- 1]][numNodesPerHiddenLayer[i]]; } previousChangeInWeightMatricesForEachLayer.add(specificLayersWeightMatrix); } // INITIALIZE ALL OF PREVIOUS DELTA VALUES TO 0 [ THIS IS DONE AUTOMATICALLY, CAN DELETE ALL OF // THIS CODE ] // initialize all weights randomly ( small random weights with 0 mean) double[][] currentLayersWeightMatrix; for (int i = 0; i < numNodesPerHiddenLayer.length + 1; i++) { // scroll across each layer currentLayersWeightMatrix = arrayListOfEachLayersWeightMatrices.get(i); for (int j = 0; j < currentLayersWeightMatrix.length; j++) { for (int k = 0; k < currentLayersWeightMatrix[j].length; k++) { currentLayersWeightMatrix[j][k] = (2 * rand.nextDouble()) - 1; } } } // GO THROUGH AND ADD THE SPECIFIC WEIGHTS // Initial Weights: // PUT ALL BIAS WEIGHTS INTO ARRAYLIST (ONE ARRAY FOR EACH LAYER'S BIAS WEIGHTS) biasWeightsAcrossAllLayers = new ArrayList<double[]>(); for (int i = 0; i < numHiddenLayers + 1; i++) { if (i < numHiddenLayers) { double[] biasArrayToBeAdded = new double[numNodesPerHiddenLayer[i]]; biasWeightsAcrossAllLayers.add(biasArrayToBeAdded); } else { double[] biasArrayForOutputNodesToBeAdded = new double[labels.cols()]; biasWeightsAcrossAllLayers.add(biasArrayForOutputNodesToBeAdded); } } double[] currentBiasLayersWeightArray; for (int i = 0; i < numNodesPerHiddenLayer.length + 1; i++) { // scroll across each layer currentBiasLayersWeightArray = biasWeightsAcrossAllLayers.get(i); for (int j = 0; j < currentBiasLayersWeightArray.length; j++) { currentBiasLayersWeightArray[j] = (2 * rand.nextDouble()) - 1; } } // We'll need to store the previous bias weights previousBiasChangeInWeightsAcrossAllLayers = new ArrayList<double[]>(); for (int i = 0; i < numHiddenLayers + 1; i++) { if (i < numHiddenLayers) { double[] biasArrayToBeAdded = new double[numNodesPerHiddenLayer[i]]; previousBiasChangeInWeightsAcrossAllLayers.add(biasArrayToBeAdded); } else { double[] biasArrayForOutputNodesToBeAdded = new double[labels.cols()]; previousBiasChangeInWeightsAcrossAllLayers.add(biasArrayForOutputNodesToBeAdded); } } // temporarily stashed bias weights across all layers temporarilyStashedChangeInBiasWeightsAcrossAllLayers = new ArrayList<double[]>(); for (int i = 0; i < numHiddenLayers + 1; i++) { if (i < numHiddenLayers) { double[] biasArrayToBeAdded = new double[numNodesPerHiddenLayer[i]]; temporarilyStashedChangeInBiasWeightsAcrossAllLayers.add(biasArrayToBeAdded); } else { double[] biasArrayForOutputNodesToBeAdded = new double[labels.cols()]; temporarilyStashedChangeInBiasWeightsAcrossAllLayers.add(biasArrayForOutputNodesToBeAdded); } } changeInBiasArrayForEveryLayer = new ArrayList<double[]>(); for (int i = 0; i < numHiddenLayers + 1; i++) { if (i < numHiddenLayers) { double[] biasArrayToBeAdded = new double[numNodesPerHiddenLayer[i]]; changeInBiasArrayForEveryLayer.add(biasArrayToBeAdded); } else { double[] biasArrayForOutputNodesToBeAdded = new double[labels.cols()]; changeInBiasArrayForEveryLayer.add(biasArrayForOutputNodesToBeAdded); } } // INITIALIZE BIAS FOR HIDDEN AND OUTPUT NEURONS // Stochastic weight update // SOMEHOW GOT TO INITIALIZE ALL OF THIS, ADD BLANKS, SO THAT LATER WE CAN // storedFNetForEachLayer.set( i, blah ); storedFNetForEachLayer = new ArrayList<double[]>(); // f_net is the output that is fed into the next layer for (int i = 0; i < numHiddenLayers + 2; i++) { // WE HAVE ONE MORE layer of fnet( consider inputs as fnet) double[] thisLayersFNetValues; // COULD DO IF/ELSE STATEMENTS IF WE ARE LOOKING AT INPUTS, OR THEN HIDDEN NODES, if (i == 0) { 
thisLayersFNetValues = new double[features.cols()]; // FIND OUT # NODES AT EACH LEVEL } else if (i == numHiddenLayers + 1) { // OR IS IT +1 thisLayersFNetValues = new double[labels.cols()]; // FIND OUT # NODES AT EACH LEVEL } else { thisLayersFNetValues = new double[numNodesPerHiddenLayer[i - 1]]; // FIND OUT # NODES AT EACH LEVEL } storedFNetForEachLayer.add(thisLayersFNetValues); } // -----BEGIN THE TRAINING----- double netValAtNode = 0; double fOfNetValAtNode = 0; for (int epoch = 0; epoch < 10000; epoch++) { // For each epoch, cap it at 10000, we want to avoid infinite loop System.out.println("---Epoch " + epoch + "---"); for (int instance = 0; instance < features.rows(); instance++) { // later we will swap this Matrix for featuresForTrainingTrimmed // GO FORWARD // --------------------------------------------------------------------------------------------------------------------- // System.out.println("Forward propagating..."); for (int layer = 0; layer < numHiddenLayers + 2; layer++) { // HERE LAYER DENOTES HIDDEN LAYER if (layer == 0) { storedFNetForEachLayer.set( layer, Arrays.copyOf(features.row(instance), features.row(0).length)); continue; } double[] thisLayersFNetValues = storedFNetForEachLayer.get( layer); // make a new array of doubles CAN I PLEASE DELETE THIS LINE OF CODE for (int node = 0; node < storedFNetForEachLayer.get(layer).length; node++) { netValAtNode = 0; // FIND THE CROSS PRODUCT; // use a for loop to multiply each col of weights vector by each col of // outputsFromPreviousLayer for (int colInInputVector = 0; colInInputVector < storedFNetForEachLayer.get(layer - 1).length; colInInputVector++) { netValAtNode += (storedFNetForEachLayer.get(layer - 1)[colInInputVector] * arrayListOfEachLayersWeightMatrices.get(layer - 1)[colInInputVector][node]); } netValAtNode += (biasWeightsAcrossAllLayers.get(layer - 1)[node]); if (netValAtNode < 0) { // make special function fOfNetValAtNode = (1 / (1 + Math.pow(Math.E, (-1 * netValAtNode)))); } else { // normal fOfNetValAtNode = (1 / (1 + (1 / (Math.pow( Math.E, (netValAtNode)))))); // if it was positive, then we raise to neg // exponent } thisLayersFNetValues[node] = fOfNetValAtNode; // stick it into the object } storedFNetForEachLayer.set( layer, thisLayersFNetValues); // or if we are editing object, this is not even necessary // DOUBLE CHECK } // ---NOW FOR THIS INSTANCE, GO // BACKWARDS----------------------------------------------------------------------------------------------------------------------- // System.out.println("Back propagating..."); // UPDATE THE WEIGHTS for (int layer = numHiddenLayers + 1; layer > 0; layer--) { // ACROSS EACH LAYER BACKWARD if (layer == numHiddenLayers + 1) { // THIS IS AN OUTPUT LAYER for (int node = 0; node < labels.cols(); node++) { double deltaArrayForThisLayer[] = arrayListOfEachLayersDeltaArray.get(layer); deltaArrayForThisLayer[node] = ((labels.get(instance, node) - storedFNetForEachLayer.get(layer)[node]) * (storedFNetForEachLayer.get(layer)[node]) * (1 - (storedFNetForEachLayer.get(layer)[node]))); // should automatically be set since we get the objects address from heap memory, and // change it for (int inputToThisNode = 0; inputToThisNode < numNodesPerHiddenLayer[layer - 2] + 1; inputToThisNode++) { double changeInWeightBetweenIJ = 0; if (inputToThisNode == numNodesPerHiddenLayer[layer - 2]) { // this is a bias node changeInWeightBetweenIJ = (learningRate * 1 * arrayListOfEachLayersDeltaArray .get(layer)[node]); // NEED TO ADD STUFF FOR MOMENTUM double[] thisLayersBiasWeights = 
changeInBiasArrayForEveryLayer.get( layer - 1); // NEED TO ADD STUFF FOR MOMENTUM thisLayersBiasWeights[node] = (changeInWeightBetweenIJ); // NEED TO ADD STUFF FOR MOMENTUM } else { changeInWeightBetweenIJ = (learningRate * storedFNetForEachLayer.get(layer - 1)[inputToThisNode] * arrayListOfEachLayersDeltaArray.get(layer)[node]); // double[][] thisLayersWeightMatrix = // arrayListOfEachLayersWeightMatrices.get(layer-1); // thisLayersWeightMatrix[inputToThisNode][node] += ( changeInWeightBetweenIJ ); double[][] changeInWeightsMatrixForThisLayer = changeInWeightMatricesForEveryLayer.get(layer - 1); changeInWeightsMatrixForThisLayer[inputToThisNode][node] = changeInWeightBetweenIJ; } } } } else { for (int node = 0; node < numNodesPerHiddenLayer[layer - 1] + 1; node++) { // ACROSS EACH HIDDEN LAYER (ie these are not output nodes) double deltaArrayForThisLayer[] = arrayListOfEachLayersDeltaArray.get(layer); if (node == numNodesPerHiddenLayer[layer - 1]) { // this is a bias node // change in weight = learningRate * } else { // this is not a bias node double summedOutgoingWeightsCrossOutputDelta = 0; for (int outgoingEdgeToOutgoingNode = 0; outgoingEdgeToOutgoingNode < arrayListOfEachLayersDeltaArray.get(layer + 1).length; outgoingEdgeToOutgoingNode++) { summedOutgoingWeightsCrossOutputDelta += (arrayListOfEachLayersDeltaArray.get(layer + 1)[outgoingEdgeToOutgoingNode] * arrayListOfEachLayersWeightMatrices .get(layer)[node][outgoingEdgeToOutgoingNode]); } deltaArrayForThisLayer[node] = ((summedOutgoingWeightsCrossOutputDelta) * (storedFNetForEachLayer.get(layer)[node]) * (1 - (storedFNetForEachLayer.get(layer)[node]))); // should automatically be set since we get the objects address from heap memory, // and change it if (layer == 1) { // need a for loop across the neural net's input nodes for (int inputToTheNeuralNet = 0; inputToTheNeuralNet < features.cols() + 1; inputToTheNeuralNet++) { double changeInWeightBetweenIJ = 0; if (inputToTheNeuralNet == features.cols()) { // then we know that this is our bias node changeInWeightBetweenIJ = (learningRate * 1 * arrayListOfEachLayersDeltaArray .get(layer)[node]); // NEED TO ADD STUFF FOR MOMENTUM double[] thisLayersBiasWeights = changeInBiasArrayForEveryLayer.get( layer - 1); // NEED TO ADD STUFF FOR MOMENTUM thisLayersBiasWeights[node] = (changeInWeightBetweenIJ); // NEED TO ADD STUFF FOR MOMENTUM } else { changeInWeightBetweenIJ = (learningRate * storedFNetForEachLayer.get(layer - 1)[inputToTheNeuralNet] * arrayListOfEachLayersDeltaArray.get(layer)[node]); double[][] changeInWeightsMatrixForThisLayer = changeInWeightMatricesForEveryLayer.get(layer - 1); changeInWeightsMatrixForThisLayer[inputToTheNeuralNet][node] = changeInWeightBetweenIJ; } } } else { for (int inputToThisNode = 0; inputToThisNode < numNodesPerHiddenLayer[layer - 2] + 1; inputToThisNode++) { double changeInWeightBetweenIJ = 0; if (inputToThisNode == numNodesPerHiddenLayer[layer - 2]) { // this is a bias node changeInWeightBetweenIJ = (learningRate * 1 * arrayListOfEachLayersDeltaArray .get(layer)[node]); // NEED TO ADD STUFF FOR MOMENTUM double[] thisLayersBiasWeights = changeInBiasArrayForEveryLayer.get( layer - 1); // NEED TO ADD STUFF FOR MOMENTUM thisLayersBiasWeights[node] = (changeInWeightBetweenIJ); // NEED TO ADD STUFF FOR MOMENTUM } else { changeInWeightBetweenIJ = (learningRate * storedFNetForEachLayer.get(layer - 1)[inputToThisNode] * arrayListOfEachLayersDeltaArray.get(layer)[node]); // double[][] thisLayersWeightMatrix = // 
arrayListOfEachLayersWeightMatrices.get(layer-1); // thisLayersWeightMatrix[inputToThisNode][node] += ( changeInWeightBetweenIJ // ); double[][] changeInWeightsMatrixForThisLayer = changeInWeightMatricesForEveryLayer.get(layer - 1); changeInWeightsMatrixForThisLayer[inputToThisNode][node] = changeInWeightBetweenIJ; } } } } } } } // System.out.printf( "e_0=%.17f, e_1=%.17f, e_2=%.17f, e_3=%.17f\n" , // arrayListOfEachLayersDeltaArray.get(2)[0], arrayListOfEachLayersDeltaArray.get(1)[0] , // arrayListOfEachLayersDeltaArray.get(1)[1] , // arrayListOfEachLayersDeltaArray.get(1)[2]); // System.out.println("Descending Gradient..."); // // PUT TEMPORARILY STASHED INTO PREVIOUS // // ONLY HERE SHOULD WE PUT IN THE STASHED WEIGHTS INTO THE PREVIOUS-STASH-SPOT // // PUT STASHED INTO PREVIOUS // // // update the bias weights // GET NEW CHANGE IN WEIGHT THANKS TO MOMENTUM, PLACE IN PREVIOUS SPOT // should be changeInBiasArrayForEveryLayer not for (int w = 0; w < previousBiasChangeInWeightsAcrossAllLayers.size(); w++) { for (int y = 0; y < previousBiasChangeInWeightsAcrossAllLayers.get(w).length; y++) { double currentChangeInWeightVal = changeInBiasArrayForEveryLayer.get(w)[y]; double[] fullBiasWeightList = biasWeightsAcrossAllLayers.get(w); double previousXYCoordInBiasWeightMatrix = previousBiasChangeInWeightsAcrossAllLayers.get(w)[y]; double thisIsTheWeightChangeIncludingMomentum = (currentChangeInWeightVal + (momentum * previousXYCoordInBiasWeightMatrix)); fullBiasWeightList[y] += thisIsTheWeightChangeIncludingMomentum; double[] arrayOfPreviousBiases = previousBiasChangeInWeightsAcrossAllLayers.get(w); arrayOfPreviousBiases[y] = thisIsTheWeightChangeIncludingMomentum; } } // GET NEW CHANGE IN WEIGHT THANKS TO MOMENTUM, PLACE IN PREVIOUS SPOT // We update the weights ( by adding the changes in weights to the weight matrices) after // every layer has been processed for (int w = 0; w < arrayListOfEachLayersWeightMatrices.size(); w++) { for (int y = 0; y < arrayListOfEachLayersWeightMatrices.get(w).length; y++) { for (int z = 0; z < arrayListOfEachLayersWeightMatrices.get(w)[y].length; z++) { double currentXYCoordInMatrix = changeInWeightMatricesForEveryLayer.get(w)[y][z]; double[] fullWeightListForLayer = arrayListOfEachLayersWeightMatrices.get(w)[y]; double previousXYCoordInChangeInWeightMatrix = previousChangeInWeightMatricesForEachLayer.get(w)[y][z]; double thisIsTheWeightChangeIncludingMomentum = (currentXYCoordInMatrix + (previousXYCoordInChangeInWeightMatrix * momentum)); fullWeightListForLayer[z] += thisIsTheWeightChangeIncludingMomentum; double[][] arrayOfPreviousBiases = previousChangeInWeightMatricesForEachLayer.get(w); arrayOfPreviousBiases[y][z] = thisIsTheWeightChangeIncludingMomentum; // newWeight(at next round t+1) = learningRate * delta_at_node_we_feed_into * Xi + // momentum_parameter * change_in_weight_at_t // momentum goes into the weight updates ( not in the change in weights) } } } // System.out.printf( "w_0=%.17f, w_1=%.17f, w_2=%.17f, w_3=%.17f, w_4=%.17f, // w_5=%.17f,\n w_6=%.17f, w_7=%.17f, w_8=%.17f, w_9=%.17f," + // "w_10=%.17f, w_11=%.17f,\n w_12=%.17f\n" , // biasWeightsAcrossAllLayers.get(1)[0], // arrayListOfEachLayersWeightMatrices.get(1)[0][0] , // arrayListOfEachLayersWeightMatrices.get(1)[1][0] , // arrayListOfEachLayersWeightMatrices.get(1)[2][0] , biasWeightsAcrossAllLayers.get(0)[0], // arrayListOfEachLayersWeightMatrices.get(0)[0][0], // arrayListOfEachLayersWeightMatrices.get(0)[1][0], biasWeightsAcrossAllLayers.get(0)[1], // 
arrayListOfEachLayersWeightMatrices.get(0)[0][1], // arrayListOfEachLayersWeightMatrices.get(0)[1][1], // arrayListOfEachLayersWeightMatrices.get(0)[0][2], // biasWeightsAcrossAllLayers.get(0)[2], // arrayListOfEachLayersWeightMatrices.get(0)[0][2], // arrayListOfEachLayersWeightMatrices.get(0)[1][2]); // // ONLY AFTER THIS POINT HAS EVERY LAYER BEEN PROCESSED } // if( STOPPING CRITERIA MET ) { // HAVE TO USE THE VALIDATION SET THIS TIME FOR THE STOPPING // CRITERION currentAccuracy = calculateMSEOnValidationSet(featuresValidationSet, labelsValidationSet); // currentAccuracy = calculateMSEOnValidationSet( features , labels ); // On the training set // now System.out.println(" Current MSE on epoch # " + epoch + " is: " + currentAccuracy); currentAccuracyIndex++; recentAccuracies[currentAccuracyIndex % 5] = currentAccuracy; double sumAccuracies = 0; if (currentAccuracyIndex > 5) { for (int i = 0; i < recentAccuracies.length; i++) { sumAccuracies += Math.abs(recentAccuracies[currentAccuracyIndex % 5] - recentAccuracies[i]); } if (sumAccuracies < 0.01) { // we only stop training when measureAccuracy after 5 epochs does not increase // by 0.01 break; } } // In theory, it would be wise here to go back to the old best weights because now we're // already overfitting if the stopping criterion is met features.shuffle( rand, labels); // MUST SHUFFLE DATA ROWS AFTER EACH EPOCH,labels is the buddy matrix } return; }
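// For reference, the stochastic update applied in train() for a weight w_ij from node i into
// node j is the standard backpropagation rule with momentum:
//   deltaW_ij(t) = learningRate * delta_j * output_i + momentum * deltaW_ij(t - 1)
//   w_ij += deltaW_ij(t)
// where delta_j is the error term stored in arrayListOfEachLayersDeltaArray and output_i is the
// f(net) value of the feeding node (or 1 for the bias weights).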
/* * We feed in */ public void predict(double[] features, double[] labels) throws Exception { double netValAtNode = 0; double fOfNetValAtNode = 0; // ALL I HAVE TO DO IS SEND IT FORWARD THROUGH THE NETWORK // only get one instance at a time for (int layer = 0; layer < numHiddenLayers + 2; layer++) { // HERE LAYER DENOTES HIDDEN LAYER if (layer == 0) { storedFNetForEachLayer.set(layer, Arrays.copyOf(features, features.length)); continue; } double[] thisLayersFNetValues = storedFNetForEachLayer.get( layer); // make a new array of doubles CAN I PLEASE DELETE THIS LINE OF CODE for (int node = 0; node < storedFNetForEachLayer.get(layer).length; node++) { netValAtNode = 0; // FIND THE CROSS PRODUCT; // use a for loop to multiply each col of weights vector by each col of // outputsFromPreviousLayer for (int colInInputVector = 0; colInInputVector < storedFNetForEachLayer.get(layer - 1).length; colInInputVector++) { netValAtNode += (storedFNetForEachLayer.get(layer - 1)[colInInputVector] * arrayListOfEachLayersWeightMatrices.get(layer - 1)[colInInputVector][node]); } netValAtNode += (biasWeightsAcrossAllLayers.get(layer - 1)[node]); if (netValAtNode < 0) { // make special function fOfNetValAtNode = (1 / (1 + Math.pow(Math.E, (-1 * netValAtNode)))); } else { // normal fOfNetValAtNode = (1 / (1 + (1 / (Math.pow( Math.E, (netValAtNode)))))); // if it was positive, then we raise to neg // exponent } thisLayersFNetValues[node] = fOfNetValAtNode; // stick it into the object } storedFNetForEachLayer.set( layer, thisLayersFNetValues); // or if we are editing object, this is not even necessary DOUBLE // CHECK } // pick the output that the network says it is // return it // put what is beyond the hidden nodes into the labels matrix double maxPredictedFOfNetVal = -999999; int predictedClass = 0; double[] storedOutputNodeFNetValues = new double[storedFNetForEachLayer.get(numHiddenLayers + 1).length]; double[] storedOutputNodeTargetValues = new double[storedFNetForEachLayer.get(numHiddenLayers + 1).length]; for (int i = 0; i < storedFNetForEachLayer.get(numHiddenLayers + 1).length; i++) { if (labels.length > 1) { storedOutputNodeFNetValues[i] = storedFNetForEachLayer.get(numHiddenLayers + 1)[i]; storedOutputNodeTargetValues[i] = labels[i]; // time to go ahead and save what we had at each output node } if (storedFNetForEachLayer.get(numHiddenLayers + 1)[i] > maxPredictedFOfNetVal) { predictedClass = i; maxPredictedFOfNetVal = storedFNetForEachLayer.get(numHiddenLayers + 1)[i]; } } labels[0] = predictedClass; globalStoredOutputNodeFNetValues = storedOutputNodeFNetValues; globalStoredOutputNodeTargetValues = storedOutputNodeTargetValues; // labels is not expecting 0,1,0 IT IS EXPECTING 0,1,2 }
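// Both branches of the activation computation in train() and predict() evaluate the same logistic
// function; a numerically stable helper that could replace them (an assumption, the original
// inlines the expression) looks like this:
static double sigmoid(double net) {
  if (net >= 0) {
    return 1.0 / (1.0 + Math.exp(-net));
  }
  double e = Math.exp(net);
  return e / (1.0 + e);
}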