Project Name File Name Usage Source Code Link
soundfingerprinting NetworkPerformanceMeter.cs Parallel.For(0, n, (i) => { IMLData actualOutputs = network.Compute(dataset.Data[i].Input); lock (LockObject) { for (int j = 0, k = actualOutputs.Count; j < k; j++) if ((actualOutputs[j] > threshold && dataset.Data[i].Ideal[j] > threshold) || (actualOutputs[j] < threshold && dataset.Data[i].Ideal[j] < threshold)) correctBits++; } }) Link To Source File
Illias UniqueTimestampTests.cs Parallel.For(0, _NumberOfItems, i => { var _UniqueTS = new UniqueTimestamps(); _UniqueTS.ThreadId = Thread.CurrentThread.ManagedThreadId; _UniqueTS.Timestamp = UniqueTimestamp.Ticks; _ConcurrentBag.Add(_UniqueTS); }) Link To Source File
Illias UniqueTimestampTests.cs Parallel.For(0, _NumberOfItems, j => { var _UniqueTS = new UniqueTimestamps(); _UniqueTS.ThreadId = Thread.CurrentThread.ManagedThreadId; _UniqueTS.Timestamp = UniqueTimestamp.Ticks; _ConcurrentBag.Add(_UniqueTS); }) Link To Source File
siren LastFMUpdater.cs Parallel.For(2, initialPage.TotalPages, new ParallelOptions { MaxDegreeOfParallelism = 10 }, delegate(int index) { var page = API.User.GetRecentTracks(username, index, 200, from); foreach (var track in page.TrackPlays) { AddTrackPlay(track); } }) Link To Source File
lucene.net TestSupportClass.cs System.Threading.Tasks.Parallel.For(0, LoopCount, (i) => { analyzers[i] = new Lucene.Net.Analysis.Standard.StandardAnalyzer(); dirs[i] = new RAMDirectory(); indexWriters[i] = new IndexWriter(dirs[i], analyzers[i], true); }) Link To Source File
lucene.net TestSupportClass.cs System.Threading.Tasks.Parallel.For(0, LoopCount, (i) => { Document document = new Document(); document.Add(new Field("field", "some test", Field.Store.NO, Field.Index.ANALYZED)); indexWriters[i].AddDocument(document); }) Link To Source File
lucene.net TestSupportClass.cs System.Threading.Tasks.Parallel.For(0, LoopCount, (i) => { analyzers[i].Close(); indexWriters[i].Close(); }) Link To Source File
lucene.net TestSupportClass.cs System.Threading.Tasks.Parallel.For(0, LoopCount, (i) => { IndexSearcher searcher = new IndexSearcher(dirs[i]); TopDocs d = searcher.Search(new TermQuery(new Term("field", "test")), 10); searcher.Close(); }) Link To Source File
lucene.net TestSupportClass.cs System.Threading.Tasks.Parallel.For(0, LoopCount, (i) => dirs[i].Close()) Link To Source File
stahc CloudBlockBlobExtension.cs Parallel.For(0, transferDetails.Length, j => { using (FileStream fs = new FileStream(file.FullName, FileMode.Open, FileAccess.Read)) { byte[] buff = new byte[transferDetails[j].BytesToRead]; BinaryReader br = new BinaryReader(fs); // move the file system reader to the proper position fs.Seek(transferDetails[j].StartPosition, SeekOrigin.Begin); br.Read(buff, 0, transferDetails[j].BytesToRead); if (buff.Length > 0) { // calculate the block-level hash string blockHash = Helpers.GetMD5HashFromStream(buff); //blob.PutBlock(transferDetails[j].BlockId, new MemoryStream(buff), blockHash, options); blob.PutBlock(transferDetails[j].BlockId, new MemoryStream(buff), null, options); } } }) Link To Source File
stahc CloudBlockBlobExtension.cs Parallel.For(0, transferDetails.Length, j => { // get the blob as a stream using (BlobStream stream = blob.OpenRead()) { // move to the proper location stream.Seek(transferDetails[j].StartPosition, SeekOrigin.Begin); // setup a buffer with the proper size byte[] buff = new byte[transferDetails[j].BytesToRead]; // read into the buffer stream.Read(buff, 0, transferDetails[j].BytesToRead); // flush the buffer to disk using (Stream fileStream = new FileStream(transferDetails[j].BlockId, FileMode.Create, FileAccess.Write, FileShare.None)) { using (BinaryWriter bw = new BinaryWriter(fileStream)) { bw.Write(buff); bw.Close(); } } buff = null; } }) Link To Source File
Modl PerformanceTests.cs Parallel.For(0, threads, i => { TimeMethod(db, iterations, cache, testMethod); }) Link To Source File
sable-fx Matrix44.cs Parallel.For(0, vectors.Length, i => { results[i] = mat.Transform(vectors[i]); }) Link To Source File
sable-fx Matrix44.cs Parallel.For(0, vectors.Length, i => { results[i] = mat.Transform(vectors[i]); }) Link To Source File
sable-fx Ray3IntersectionExtensions.cs Parallel.For(0, count, () => { // Clone the supplied ray cast info, then share it for all items on this thread. var localRayCastInfo = rayCastInfo.DeepClone(); var localRayInterInfo = (Ray3Triangle3IntersectionInfo)localRayCastInfo.IntersectionInfo; return new Tuple(localRayCastInfo, localRayInterInfo); }, (i, loop, info) => { Triangle3 triangle = triangleList[i]; var localRayInterInfo = info.Item2; if (triangle.IntersectsRay3(ray, ref localRayInterInfo)) { var localRayCastInfo = info.Item1; localRayCastInfo.TargetPrimitiveIndex = i; localRayCastInfo.Distance = localRayInterInfo.t; if (worldTransform == null) { localRayCastInfo.Position = new Double.Vec3(triangle.p1 * localRayInterInfo.b1 + triangle.p2 * localRayInterInfo.b2 + triangle.p3 * localRayInterInfo.b3); } else { localRayCastInfo.Position = worldTransform.Matrix.Transform( new Double.Vec3(triangle.p1 * localRayInterInfo.b1 + triangle.p2 * localRayInterInfo.b2 + triangle.p3 * localRayInterInfo.b3)); } hitAction(localRayCastInfo); } return info; }, info => { }) Link To Source File
sable-fx Ray3IntersectionExtensions.cs Parallel.For(0, count, i => { Triangle3 triangle = triangleList[i]; var rayInterInfo = new Ray3Triangle3IntersectionInfo(); if (triangle.IntersectsRay3(ray, ref rayInterInfo)) { bool lockTaken = false; try { sl.Enter(ref lockTaken); if (rayInterInfo.t < closestInterInfo.t) closestInterInfo.Copy(rayInterInfo); } finally { if (lockTaken) sl.Exit(false); } } }) Link To Source File
sable-fx Ray3IntersectionExtensions.cs Parallel.For(0, count, () => { // Clone the supplied ray cast info, then share it for all items on this thread. var localRayCastInfo = rayCastInfo.DeepClone(); var localRayInterInfo = (Ray3Triangle3IntersectionInfo)localRayCastInfo.IntersectionInfo; return new Tuple(localRayCastInfo, localRayInterInfo); }, (i, loop, info) => { Triangle3 triangle = triangleList[i]; var localRayInterInfo = info.Item2; if (triangle.IntersectsRay3(ray, ref localRayInterInfo)) { var localRayCastInfo = info.Item1; localRayCastInfo.TargetPrimitiveIndex = i; localRayCastInfo.Distance = localRayInterInfo.t; if (worldTransform == null) { localRayCastInfo.Position = new Double.Vec3(triangle.p1 * localRayInterInfo.b1 + triangle.p2 * localRayInterInfo.b2 + triangle.p3 * localRayInterInfo.b3); } else { localRayCastInfo.Position = worldTransform.Matrix.Transform( new Double.Vec3(triangle.p1 * localRayInterInfo.b1 + triangle.p2 * localRayInterInfo.b2 + triangle.p3 * localRayInterInfo.b3)); } hitAction(localRayCastInfo); } return info; }, info => { }) Link To Source File
sable-fx Ray3IntersectionExtensions.cs Parallel.For(0, count, i => { Triangle3 triangle = triangleList[i]; var rayInterInfo = new Ray3Triangle3IntersectionInfo(); if (triangle.IntersectsRay3(ray, ref rayInterInfo)) { bool lockTaken = false; try { sl.Enter(ref lockTaken); if (rayInterInfo.t < closestInterInfo.t) closestInterInfo.Copy(rayInterInfo); } finally { if (lockTaken) sl.Exit(false); } } }) Link To Source File
BTDB SpeedTest1.cs Parallel.For(0, 4, iter => { using (var tr = db.StartTransaction()) { var key = new byte[4000]; for (int i = 0; i < 30000; i++) { key[3] = (byte)(i % 256); key[2] = (byte)(i / 256 % 256); key[1] = (byte)(i / 256 / 256 % 256); key[0] = (byte)(i / 256 / 256 / 256); if (!tr.FindExactKey(key)) Trace.Assert(false); //var val = tr.ReadValue(); //Trace.Assert(key.SequenceEqual(val)); } } }) Link To Source File
WCFQuickSamples Program.cs Parallel.For( 0, ids.Count, (t) => { var id = ids[t]; var b = request[id]; lock (b) { if (b.Name == 4.ToString()) { Thread.Sleep(10000); } Console.WriteLine(b.Name); } }) Link To Source File
sshnet TestPortForwarding.NET40.cs System.Threading.Tasks.Parallel.For(0, 100, //new ParallelOptions //{ // MaxDegreeOfParallelism = 20, //}, (counter) => { var start = DateTime.Now; var req = HttpWebRequest.Create("http://localhost:8084"); using (var response = req.GetResponse()) { var data = ReadStream(response.GetResponseStream()); var end = DateTime.Now; Debug.WriteLine(string.Format("Request# {2}: Lenght: {0} Time: {1}", data.Length, (end - start), counter)); } } ) Link To Source File
sshnet TestPortForwarding.NET40.cs System.Threading.Tasks.Parallel.For(0, 5, //new ParallelOptions //{ // MaxDegreeOfParallelism = 1, //}, (counter) => { var cmd = client.CreateCommand(string.Format("wget -O- http://localhost:{0}", boundport)); var result = cmd.Execute(); var end = DateTime.Now; Debug.WriteLine(string.Format("Length: {0}", result.Length)); } ) Link To Source File
sshnet TestPortForwarding.NET40.cs System.Threading.Tasks.Parallel.For(0, 100, //new ParallelOptions //{ // MaxDegreeOfParallelism = 20, //}, (counter) => { var start = DateTime.Now; var req = HttpWebRequest.Create("http://localhost:8084"); using (var response = req.GetResponse()) { var data = ReadStream(response.GetResponseStream()); var end = DateTime.Now; Debug.WriteLine(string.Format("Request# {2}: Length: {0} Time: {1}", data.Length, (end - start), counter)); } } ) Link To Source File
sshnet TestSshCommand.NET40.cs System.Threading.Tasks.Parallel.For(0, 10000, () => { var client = new SshClient(Resources.HOST, Resources.USERNAME, Resources.PASSWORD); client.Connect(); return client; }, (int counter, ParallelLoopState pls, SshClient client) => { var result = ExecuteTestCommand(client); Debug.WriteLine(string.Format("TestMultipleThreadMultipleConnections #{0}", counter)); Assert.IsTrue(result); return client; }, (SshClient client) => { client.Disconnect(); client.Dispose(); } ) Link To Source File
sshnet TestSshCommand.NET40.cs System.Threading.Tasks.Parallel.For(0, 10000, (counter) => { var result = ExecuteTestCommand(client); Debug.WriteLine(string.Format("TestMultipleThreadMultipleConnections #{0}", counter)); Assert.IsTrue(result); } ) Link To Source File
c-raft CustomGenerator.cs Parallel.For(0, 16, x => { int offsetX = (x / 4) * 4; for (int y = 0; y < 128; y++) { int offsetY = (y / 8) * 8; for (int z = 0; z < 16; z++) { if (!(x%4 == 0 && y%8 == 0 && z%4 == 0)) { int offsetZ = (z/4)*4; densityMap[x, y, z] = triLerp(x, y, z, densityMap[offsetX, offsetY, offsetZ], densityMap[offsetX, offsetY + 8, offsetZ], densityMap[offsetX, offsetY, offsetZ + 4], densityMap[offsetX, offsetY + 8, offsetZ + 4], densityMap[4 + offsetX, offsetY, offsetZ], densityMap[4 + offsetX, offsetY + 8, offsetZ], densityMap[4 + offsetX, offsetY, offsetZ + 4], densityMap[4 + offsetX, offsetY + 8, offsetZ + 4], offsetX, 4 + offsetX, offsetY, 8 + offsetY, offsetZ, offsetZ + 4); } } } }) Link To Source File
c-raft Server.cs Parallel.For(0, count, i => { Client client; if (!SendClientQueue.TryDequeue(out client)) return; if (!client.Running) { client.DisposeSendSystem(); return; } client.Send_Start(); }) Link To Source File
c-raft Server.cs Parallel.For(0, count, i => { Client client; if (!RecvClientQueue.TryDequeue(out client)) return; if (!client.Running) return; Interlocked.Exchange(ref client.TimesEnqueuedForRecv, 0); ByteQueue bufferToProcess = client.GetBufferToProcess(); int length = client.FragPackets.Size + bufferToProcess.Size; while (length > 0) { byte packetType = 0; if (client.FragPackets.Size > 0) packetType = client.FragPackets.GetPacketID(); else packetType = bufferToProcess.GetPacketID(); //client.Logger.Log(Chraft.Logger.LogLevel.Info, "Reading packet {0}", ((PacketType)packetType).ToString()); PacketHandler handler = PacketHandlers.GetHandler((PacketType)packetType); if (handler == null) { byte[] unhandledPacketData = GetBufferToBeRead(bufferToProcess, client, length); // TODO: handle this case, writing on the console a warning and/or writing it plus the bytes on a log client.Logger.Log(Chraft.Logger.LogLevel.Caution, "Unhandled packet arrived, id: {0}", unhandledPacketData[0]); client.Logger.Log(Chraft.Logger.LogLevel.Warning, "Data:/r/n {0}", BitConverter.ToString(unhandledPacketData, 1)); length = 0; } else if (handler.Length == 0) { byte[] data = GetBufferToBeRead(bufferToProcess, client, length); if (length >= handler.MinimumLength) { PacketReader reader = new PacketReader(data, length); handler.OnReceive(client, reader); // If we failed it's because the packet isn't complete if (reader.Failed) { EnqueueFragment(client, data); length = 0; } else { bufferToProcess.Enqueue(data, reader.Index, data.Length - reader.Index); length = bufferToProcess.Length; } } else { EnqueueFragment(client, data); length = 0; } } else if (length >= handler.Length) { byte[] data = GetBufferToBeRead(bufferToProcess, client, handler.Length); PacketReader reader = new PacketReader(data, handler.Length); handler.OnReceive(client, reader); // If we failed it's because the packet is wrong if (reader.Failed) { client.MarkToDispose(); length = 0; } else length = bufferToProcess.Length; } else { byte[] data = GetBufferToBeRead(bufferToProcess, client, length); EnqueueFragment(client, data); length = 0; } } }) Link To Source File
c-raft WorldManager.cs Parallel.For(0, entry.Requests.Count, (i) => { ClientRequest req; if (!entry.Requests.TryDequeue(out req)) return; if (entry.ChunkRequested.LightToRecalculate) entry.ChunkRequested.RecalculateSky(); req.ClientRequesting.Owner.LoadedChunks.TryUpdate(entry.ChunkRequested.Coords.ChunkPackedCoords, entry.ChunkRequested, null); entry.ChunkRequested.AddClient(req.ClientRequesting); req.ClientRequesting.SendPreChunk(entry.ChunkRequested.Coords.ChunkX, entry.ChunkRequested.Coords.ChunkZ, true, false); req.ClientRequesting.SendChunk(entry.ChunkRequested, false); }) Link To Source File
Kinect-Annotation-and-Evaluation-Tool OpenNIImageProvider.cs Parallel.For(0, imageMD.YRes, (y) => { byte* pDest = (byte*)data.Scan0.ToPointer() + y * stride; byte* pImage = (byte*)imageGenerator.ImageMapPtr.ToPointer() + y * stride; for (int x = 0; x < imageMD.XRes; ++x, pDest += 3, pImage += 3) { pDest[2] = pImage[0]; pDest[1] = pImage[1]; pDest[0] = pImage[2]; } }) Link To Source File
Kinect-Annotation-and-Evaluation-Tool OpenNIImageProvider.cs Parallel.For(0, imageMD.YRes, (y) => { ushort label; byte* pImage = (byte*)imageGenerator.ImageMapPtr.ToPointer() + y * stride; ushort* pLabels = userInformation + y * imageMD.XRes; byte* pDest = (byte*)data.Scan0.ToPointer() + y * stride; for (int x = 0; x < imageMD.XRes; ++x, ++pLabels, pDest += 3, pImage += 3) { pDest[0] = pDest[1] = pDest[2] = 0; label = *pLabels; if (drawBackground || *pLabels != 0 && !background_users.Contains(*pLabels)) { Color labelColor = Color.White; if (label != 0 && drawHighlight) { labelColor = COLORS[label % NCOLORS]; } pDest[2] = (byte)(pImage[0] * (labelColor.B / 256.0)); pDest[1] = (byte)(pImage[1] * (labelColor.G / 256.0)); pDest[0] = (byte)(pImage[2] * (labelColor.R / 256.0)); } } }) Link To Source File
Kinect-Annotation-and-Evaluation-Tool OpenNIImageProvider.cs Parallel.For(0, depthMD.YRes, (y) => { ushort* pDepth = (ushort*)this.depthGenerator.DepthMapPtr.ToPointer() + y * depthMD.XRes; byte* pDest = (byte*)data.Scan0.ToPointer() + y * data.Stride; for (int x = 0; x < depthMD.XRes; ++x, pDest += 3, pDepth++) { pDest[0] = pDest[1] = pDest[2] = 0; //byte pixel = (byte)((*pDepth) / depthMax * 255.0); byte pixel = (byte)histogram[*pDepth]; pDest[2] = pixel; pDest[1] = pixel; pDest[0] = pixel; } }) Link To Source File
Kinect-Annotation-and-Evaluation-Tool OpenNIImageProvider.cs Parallel.For(0, depthMD.YRes, (y) => { ushort* pLabels = userInformation + y * depthMD.XRes; byte* pDest = (byte*)data.Scan0.ToPointer() + y * data.Stride; ushort* pDepth = (ushort*)this.depthGenerator.DepthMapPtr.ToPointer() + y * depthMD.XRes; for (int x = 0; x < depthMD.XRes; ++x, ++pLabels, pDest += 3, pDepth++) { pDest[0] = pDest[1] = pDest[2] = 0; ushort label = *pLabels; if (drawBackground || *pLabels != 0 && !background_users.Contains(*pLabels)) { Color labelColor = Color.White; if (label != 0 && drawHighlight) { labelColor = COLORS[label % NCOLORS]; } double pixel = (byte)histogram[*pDepth]; pDest[2] = (byte)(pixel * (labelColor.B / 256.0)); pDest[1] = (byte)(pixel * (labelColor.G / 256.0)); pDest[0] = (byte)(pixel * (labelColor.R / 256.0)); } } }) Link To Source File
NGauge Aggregating.cs Parallel.For(0, _list.Count, i => { lock (_list) { if (_list[i].SomeInt > max) { max = _list[i].SomeInt; } } }) Link To Source File
NGauge Filtering.cs Parallel.For(0, _list.Count, i => { if (_list[i].SomeInt == SAMPLE) { values.Enqueue(_list[i]); } }) Link To Source File
NGauge Intersecting.cs Parallel.For(0, _listA.Count, i => { for (int j = 0; j < _listB.Count; j++) { if (_listA[i].SomeInt == _listB[j].SomeInt && _listA[i].SomeString.Equals(_listB[j].SomeString)) { var isNew = true; lock (values) { foreach (var value in values) { if (value.SomeInt == _listA[i].SomeInt && value.SomeString.Equals(_listA[i].SomeString)) { isNew = false; break; } } if (isNew) { values.Add(_listA[i]); } } break; } } }) Link To Source File
NGauge Quantificating.cs Parallel.For(0, _list.Count, i => { if (flag && !_list[i].SomeString.Equals(SAMPLE)) { lock (_list) { flag = false; } } }) Link To Source File
NGauge Transforming.cs Parallel.For(0, _list.Count, i => { values.Enqueue(_list[i].SomeString); }) Link To Source File
EasyNMS Program.cs Parallel.For(0, PARALLEL_PRODUCE_COUNT, (i) => { var msg = producer.MessageFactory.CreateTextMessage("Test #" + i); producer.SendRequest(msg, Apache.NMS.MsgDeliveryMode.NonPersistent, Apache.NMS.MsgPriority.High, new TimeSpan(0, 1, 0)); Console.WriteLine("[{0:HH:mm:ss.fffff}] Sent => {1}", DateTime.Now, msg.Text); }) Link To Source File
localtunnel-net-client TestPortForwarding.NET40.cs System.Threading.Tasks.Parallel.For(0, 100, //new ParallelOptions //{ // MaxDegreeOfParallelism = 20, //}, (counter) => { var start = DateTime.Now; var req = HttpWebRequest.Create("http://localhost:8084"); using (var response = req.GetResponse()) { var data = ReadStream(response.GetResponseStream()); var end = DateTime.Now; Debug.WriteLine(string.Format("Request# {2}: Lenght: {0} Time: {1}", data.Length, (end - start), counter)); } } ) Link To Source File
localtunnel-net-client TestPortForwarding.NET40.cs System.Threading.Tasks.Parallel.For(0, 5, //new ParallelOptions //{ // MaxDegreeOfParallelism = 1, //}, (counter) => { var cmd = client.CreateCommand(string.Format("wget -O- http://localhost:{0}", boundport)); var result = cmd.Execute(); var end = DateTime.Now; Debug.WriteLine(string.Format("Length: {0}", result.Length)); } ) Link To Source File
localtunnel-net-client TestPortForwarding.NET40.cs System.Threading.Tasks.Parallel.For(0, 100, //new ParallelOptions //{ // MaxDegreeOfParallelism = 20, //}, (counter) => { var start = DateTime.Now; var req = HttpWebRequest.Create("http://localhost:8084"); using (var response = req.GetResponse()) { var data = ReadStream(response.GetResponseStream()); var end = DateTime.Now; Debug.WriteLine(string.Format("Request# {2}: Length: {0} Time: {1}", data.Length, (end - start), counter)); } } ) Link To Source File
localtunnel-net-client TestSshCommand.NET40.cs System.Threading.Tasks.Parallel.For(0, 10000, () => { var client = new SshClient(Resources.HOST, Resources.USERNAME, Resources.PASSWORD); client.Connect(); return client; }, (int counter, ParallelLoopState pls, SshClient client) => { var result = ExecuteTestCommand(client); Debug.WriteLine(string.Format("TestMultipleThreadMultipleConnections #{0}", counter)); Assert.IsTrue(result); return client; }, (SshClient client) => { client.Disconnect(); client.Dispose(); } ) Link To Source File
localtunnel-net-client TestSshCommand.NET40.cs System.Threading.Tasks.Parallel.For(0, 10000, (counter) => { var result = ExecuteTestCommand(client); Debug.WriteLine(string.Format("TestMultipleThreadMultipleConnections #{0}", counter)); Assert.IsTrue(result); } ) Link To Source File
PineCone StructureBuilderPreservingId.cs Parallel.For(0, items.Length, i => { var itm = items[i]; var id = structureSchema.IdAccessor.GetValue(itm); structureSchema.IdAccessor.SetValue(itm, id); structures[i] = new Structure( structureSchema.Name, id, IndexesFactory.CreateIndexes(structureSchema, itm, id), StructureSerializer.Serialize(itm)); }) Link To Source File
PineCone StructureBuilder.cs Parallel.For(0, items.Length, i => { var id = structureIds[i]; var itm = items[i]; structureSchema.IdAccessor.SetValue(itm, id); structures[i] = new Structure( structureSchema.Name, id, IndexesFactory.CreateIndexes(structureSchema, itm, id), StructureSerializer.Serialize(itm)); }) Link To Source File
PineCone StructureIndexesFactory.cs Parallel.For(0, indexes.Length, c => { var indexAccessor = structureSchema.IndexAccessors[c]; var values = indexAccessor.GetValues(item); var valuesExists = values != null && values.Count > 0; var isCollectionOfValues = indexAccessor.IsEnumerable || indexAccessor.IsElement || (values != null && values.Count > 1); if (!valuesExists) { if (indexAccessor.IsUnique) throw new PineConeException(ExceptionMessages.StructureIndexesFactory_UniqueIndex_IsNull.Inject(structureSchema.Name, indexAccessor.Path)); return; } if (!isCollectionOfValues) indexes[c] = new[] { new StructureIndex(structureId, indexAccessor.Path, values[0], indexAccessor.DataType, indexAccessor.UniqueMode.ToStructureIndexType()) }; else { var subIndexes = new IStructureIndex[values.Count]; Parallel.For(0, subIndexes.Length, subC => { subIndexes[subC] = new StructureIndex(structureId, indexAccessor.Path, values[subC], indexAccessor.ElementType ?? indexAccessor.DataType, indexAccessor.UniqueMode.ToStructureIndexType()); }); indexes[c] = subIndexes; } }) Link To Source File
PineCone StructureIndexesFactory.cs Parallel.For(0, subIndexes.Length, subC => { subIndexes[subC] = new StructureIndex(structureId, indexAccessor.Path, values[subC], indexAccessor.ElementType ?? indexAccessor.DataType, indexAccessor.UniqueMode.ToStructureIndexType()); }) Link To Source File
SudokuCS TrialAndError.cs Parallel.For(0, 625, (i, loopState) => { int index; lock (this) { index = _index; _index += 36; _index %= sz; } var col = index % 25; var row = index / 25; if (OperateOn(col, row, model)) { loopState.Stop(); } // go to the next cell }) Link To Source File
BrainLab PermDist.xaml.cs //for (var edgeIdx = 0; edgeIdx < 4005; edgeIdx++) Parallel.For(0, 4005, edgeIdx => { var tstat = Math.Abs(TStat(edgeIdx, idxs, edges)); if (tstat != Double.NaN) { var pval = GetPVal(dimension, tstat); if (pval < 0.05) adj[edgeIdx] = pval; else adj[edgeIdx] = 1; } }) Link To Source File
MachineLearning.NET NMF.cs // multiply column (aRow) from w.T by row (aRow) from A (sparse vector) Parallel.For(0, featuresCount, i => { var multiplicationFactor = wT[i, localRowIdxSparseVector.Key]; foreach (var pair in localRowIdxSparseVector.Value) { hn[i, pair.Key] += multiplicationFactor * pair.Value; } }) Link To Source File
MachineLearning.NET NMF.cs // wTw = w.T * w - is symmetric array Parallel.For(0, featuresCount, i => { // optimization: compute right upper half array only for (int j = i; j < featuresCount; j++) { // compute wTw[i,j] as dot product of row i in wT by column j in w var v = GetMultiplicationElement(wT, w, i, j); wTw[i, j] = v; // optimization: copy result to wTw[j,i] (left bottom) if (i != j) { wTw[j, i] = v; } } }) Link To Source File
MachineLearning.NET NMF.cs // hd = (w.T * w) * h Parallel.For(0, featuresCount, i => { for (int j = 0; j < cc; j++) { hd[i, j] = GetMultiplicationElement(wTw, h, i, j); } }) Link To Source File
MachineLearning.NET NMF.cs // update h = h .* hn ./ hd Parallel.For(0, featuresCount, i => { for (int j = 0; j < cc; j++) { var nom = hn[i, j]; var den = hd[i, j]; if (nom != den) { h[i, j] = h[i, j] * nom / den; } } }) Link To Source File
MachineLearning.NET NMF.cs Parallel.For(0, featuresCount, hTColumn => { wn[localRowIdxSparseVector.Key, hTColumn] = localRowIdxSparseVector.Value.Sum(pair => pair.Value * hT[pair.Key, hTColumn]); }) Link To Source File
MachineLearning.NET NMF.cs // hhT = h * h.T - symmetric array Parallel.For(0, featuresCount, i => { // optimization: compute right upper half array only for (int j = i; j < featuresCount; j++) { // compute hhT[i,j] as dot product of row i in h by column j in hT var v = GetMultiplicationElement(h, hT, i, j); hhT[i, j] = v; // optimization: copy result to hhT[j,i] (left bottom) if (i != j) { hhT[j, i] = v; } } }) Link To Source File
MachineLearning.NET NMF.cs // wd = w * (h * h.T) Parallel.For(0, rc, i => { for (int j = 0; j < featuresCount; j++) { wd[i, j] = GetMultiplicationElement(w, hhT, i, j); } }) Link To Source File
MachineLearning.NET NMF.cs // update w = w .* wn ./ wd Parallel.For(0, rc, i => { for (int j = 0; j < featuresCount; j++) { var nom = wn[i, j]; var den = wd[i, j]; if (nom != den) { w[i, j] = w[i, j] * nom / den; } } }) Link To Source File
MachineLearning.NET NMF.cs Parallel.For(0, array.Size0, i => { for (int j = 0; j < array.Size1; j++) { double nextDouble; lock(random) { nextDouble = random.NextDouble(); } array[i, j] = nextDouble; } }) Link To Source File
CorrugatedIron RoundRobinStrategyTests.cs Parallel.For(0, 3, i => { results[i] = DoStuffWithNodes(roundRobin, CreateMockNodes()); }) Link To Source File
CorrugatedIron LoadTests.cs Parallel.For(0, ThreadCount, i => { results[i] = DoMapRed(query); }) Link To Source File
CorrugatedIron LoadTests.cs Parallel.For(0, ThreadCount, i => { results[i] = DoStreamingMapRed(query); }) Link To Source File
MongoMapper.NET InsertModifyDeleteTest.cs Parallel.For (0, 1000, i => { Country c = new Country { Code = i.ToString(), Name = String.Format("Nombre {0}",i) }; c.Save(); } ) Link To Source File
AppMetrics Program.cs Parallel.For(0, ThreadsCount, i => { try { var domain = AppDomain.CreateDomain("TestRunner" + i); var proxy = (TestRunner) domain.CreateInstanceAndUnwrap(proxyType.Assembly.FullName, proxyType.FullName); var subRes = proxy.Execute(_url); Console.WriteLine("Thread result: {0}", subRes); lock (sync) { res += subRes; } } catch (Exception exc) { Console.WriteLine(exc); } }) Link To Source File
ForgeCraft-OutDatedVersion Chunker.cs Parallel.For(0, Server.genThreads, delegate(int wtf) // The int is so the compiler will shut up. { GeneratorLoop(); }) Link To Source File
ForgeCraft-OutDatedVersion World.cs Parallel.For(((int)SpawnX >> 4) - Server.ViewDistance, ((int)SpawnX >> 4) + Server.ViewDistance + 1, delegate(int x) { Parallel.For(((int)SpawnZ >> 4) - Server.ViewDistance, ((int)SpawnZ >> 4) + Server.ViewDistance + 1, delegate(int z) { LoadChunk(x, z, false, false); lock (derpLock) { Console.SetCursorPosition(cursorH, Console.CursorTop); count++; Console.Write((int)((count / total) * 100) + "%"); } }); //Logger.Log(x + " Row Generated."); }) Link To Source File
ForgeCraft-OutDatedVersion World.cs Parallel.For(((int)SpawnZ >> 4) - Server.ViewDistance, ((int)SpawnZ >> 4) + Server.ViewDistance + 1, delegate(int z) { LoadChunk(x, z, false, false); lock (derpLock) { Console.SetCursorPosition(cursorH, Console.CursorTop); count++; Console.Write((int)((count / total) * 100) + "%"); } }) Link To Source File
ForgeCraft-OutDatedVersion World.cs Parallel.For(((int)w.SpawnX >> 4) - Server.ViewDistance, ((int)w.SpawnX >> 4) + Server.ViewDistance + 1, x => { Parallel.For(((int)w.SpawnZ >> 4) - Server.ViewDistance, ((int)w.SpawnZ >> 4) + Server.ViewDistance + 1, z => { w.LoadChunk(x, z, false, false); lock (derpLock) { Console.SetCursorPosition(cursorH, Console.CursorTop); count++; Console.Write((int)((count / total) * 100) + "%"); } }); }) Link To Source File
ForgeCraft-OutDatedVersion World.cs Parallel.For(((int)w.SpawnZ >> 4) - Server.ViewDistance, ((int)w.SpawnZ >> 4) + Server.ViewDistance + 1, z => { w.LoadChunk(x, z, false, false); lock (derpLock) { Console.SetCursorPosition(cursorH, Console.CursorTop); count++; Console.Write((int)((count / total) * 100) + "%"); } }) Link To Source File
RecommendationSystem SvdBoostedKnnTester.cs Parallel.For(0, NumberOfTests, i => { IUser user; do { user = TestUsers[rng.Next(TestUsers.Count)]; } while (user.Ratings.Count < 2); lock (user) { var ratingIndex = rng.Next(user.Ratings.Count); var rating = user.Ratings[ratingIndex]; var originalRatings = user.Ratings; user.Ratings = user.Ratings.Where(r => r != rating).ToList(); var predictedRating = RecommendationSystem.Recommender.PredictRatingForArtist(user, Model, Artists, rating.ArtistIndex); var error = predictedRating - rating.Value; biasBC[(int)rating.Value - 1].Add(error); maeBC[(int)rating.Value - 1].Add(Math.Abs(error)); user.Ratings = originalRatings; Write(string.Format("{0}/t{1}", predictedRating, rating.Value), false); if (maeBC.Sum(bc => bc.Count) % writeFrequency == 0) Write(string.Format("Test {0} with {1} ({2})", TestName, GetMaeBiasAndVariance(biasBC, maeBC), DateTime.Now), toFile: false); } }) Link To Source File
RecommendationSystem KnnTester.cs Parallel.For(0, NumberOfTests, i => { IUser user; do { user = TestUsers[rng.Next(TestUsers.Count)]; } while (user.Ratings.Count < 2); lock (user) { var ratingIndex = rng.Next(user.Ratings.Count); var rating = user.Ratings[ratingIndex]; var originalRatings = user.Ratings; user.Ratings = user.Ratings.Where(r => r != rating).ToList(); var predictedRating = rs.Recommender.PredictRatingForArtist(user, SimpleKnnModel, Artists, rating.ArtistIndex); var error = predictedRating - rating.Value; biasBC[(int)rating.Value - 1].Add(error); maeBC[(int)rating.Value - 1].Add(Math.Abs(error)); user.Ratings = originalRatings; Write(string.Format("{0}/t{1}", predictedRating, rating.Value), false); if (maeBC.Sum(bc => bc.Count) % writeFrequency == 0) Write(string.Format("Test {0} with {1} ({2})", TestName, GetMaeBiasAndVariance(biasBC, maeBC), DateTime.Now), toFile: false); } }) Link To Source File
Stateless Program.cs Parallel.For(0, 10, i => { C.WriteLine(ConsoleColor.Black, "firing {0}", i); m.Fire(' ', i); }) Link To Source File
Stateless Program.cs Parallel.For(0, 10, i => { C.WriteLine(ConsoleColor.Black, "firing {0}", i); m.Fire(' ', i); }) Link To Source File
ILSpy CSDemo.cs Parallel.For( 0, assemblies.Length, delegate (int i) { Stopwatch w = Stopwatch.StartNew(); CecilLoader loader = new CecilLoader(); projectContents[i] = loader.LoadAssemblyFile(assemblies[i].Location); Debug.WriteLine(Path.GetFileName(assemblies[i].Location) + ": " + w.Elapsed); }) Link To Source File
ILSpy MainWindow.cs Parallel.For( 0, assemblies.Length, delegate (int i) { Stopwatch w = Stopwatch.StartNew(); CecilLoader loader = new CecilLoader(); projectContents[i] = loader.LoadAssemblyFile(assemblies[i].Location); }) Link To Source File
NRefactory CSDemo.cs Parallel.For( 0, assemblies.Length, delegate (int i) { Stopwatch w = Stopwatch.StartNew(); CecilLoader loader = new CecilLoader(); projectContents[i] = loader.LoadAssemblyFile(assemblies[i].Location); Debug.WriteLine(Path.GetFileName(assemblies[i].Location) + ": " + w.Elapsed); }) Link To Source File
NRefactory MainWindow.cs Parallel.For( 0, assemblies.Length, delegate (int i) { Stopwatch w = Stopwatch.StartNew(); CecilLoader loader = new CecilLoader(); projectContents[i] = loader.LoadAssemblyFile(assemblies[i].Location); }) Link To Source File
MafiaSimulator FrmMain.cs Parallel.For(0, iterations, i => simulateGame(i, false, options)) Link To Source File
NETGen ClusterAggregation.cs System.Threading.Tasks.Parallel.For(0, Properties.Settings.Default.runs, j => { ClusterNetwork net = new ClusterNetwork(Properties.Settings.Default.Nodes, Properties.Settings.Default.Edges, Properties.Settings.Default.Clusters, mod); Console.WriteLine("Run {0}, created cluster network with modularity={2:0.00}", j, (net as ClusterNetwork).NewmanModularity); res = RunAggregation(net, bias); results.Add(res.FinalVariance); modularity.Add(res.Modularity); }) Link To Source File
NETGen ClusterSIR.cs System.Threading.Tasks.Parallel.For(0, Properties.Settings.Default.runs, j => { ClusterNetwork net = new ClusterNetwork(0, 0,0 ,0d); Console.WriteLine("Run {0}, created cluster network with modularity={2:0.00}", j, (net as ClusterNetwork).NewmanModularity); /// TODO: Run experiment }) Link To Source File
NETGen ClusterSpreading.cs System.Threading.Tasks.Parallel.For(0, Properties.Settings.Default.runs, j => { ClusterNetwork net = new ClusterNetwork(Properties.Settings.Default.Nodes, Properties.Settings.Default.Edges, Properties.Settings.Default.Clusters, mod); Console.WriteLine("Run {0}, created cluster network with modularity={1:0.00}", j, (net as ClusterNetwork).NewmanModularity); /// TODO: Run experiment }) Link To Source File
Autobe ImapConnectionPool.cs Parallel.For(0, MaxAllowedImapConnections, i => { ImapConnection connection = new ImapConnection(_settings, _credentials); connection.OpenAndAuthenticate(); _connectionHolder.Add(connection); } ) Link To Source File
iSynaptic.Commons FuncExtensionsTests.generated.cs Parallel.For(start, end, x => func(1)) Link To Source File
iSynaptic.Commons FuncExtensionsTests.generated.cs Parallel.For(start, end, x => func(1, 2)) Link To Source File
iSynaptic.Commons FuncExtensionsTests.generated.cs Parallel.For(start, end, x => func(1, 2, 3)) Link To Source File
iSynaptic.Commons FuncExtensionsTests.generated.cs Parallel.For(start, end, x => func(1, 2, 3, 4)) Link To Source File
iSynaptic.Commons FuncExtensionsTests.generated.cs Parallel.For(start, end, x => func(1, 2, 3, 4, 5)) Link To Source File
iSynaptic.Commons FuncExtensionsTests.generated.cs Parallel.For(start, end, x => func(1, 2, 3, 4, 5, 6)) Link To Source File
iSynaptic.Commons FuncExtensionsTests.generated.cs Parallel.For(start, end, x => func(1, 2, 3, 4, 5, 6, 7)) Link To Source File
iSynaptic.Commons FuncExtensionsTests.cs Parallel.For(start, end, x => func()) Link To Source File
Craig-s-Utility-Library Random.cs Parallel.For(0, 100, x => { Assert.DoesNotThrow(() => Utilities.Random.Random.ThreadSafeNext(-20, 20)); }) Link To Source File
Craig-s-Utility-Library IEnumerableExtensions.cs Parallel.For(Start, End + 1, new Action<int>(x => Action(List.ElementAt(x)))) Link To Source File
Craig-s-Utility-Library IEnumerableExtensions.cs Parallel.For(Start, End + 1, new Action<int>(x => Results[x - Start] = Function(List.ElementAt(x)))) Link To Source File
Craig-s-Utility-Library BitmapExtensions.cs Parallel.For(0, Width, x => { for (int y = 0; y < Height; ++y) { Color CurrentPixel = OldData.GetPixel(x, y, OldPixelSize); int R = CurrentPixel.R + Random.Random.ThreadSafeNext(-Amount, Amount + 1); int G = CurrentPixel.G + Random.Random.ThreadSafeNext(-Amount, Amount + 1); int B = CurrentPixel.B + Random.Random.ThreadSafeNext(-Amount, Amount + 1); R = R.Clamp(255, 0); G = G.Clamp(255, 0); B = B.Clamp(255, 0); Color TempValue = Color.FromArgb(R, G, B); NewData.SetPixel(x, y, TempValue, NewPixelSize); } }) Link To Source File
Craig-s-Utility-Library BitmapExtensions.cs Parallel.For(0, Width, x => { for (int y = 0; y < Height; ++y) { Color Pixel = OldData.GetPixel(x, y, OldPixelSize); float Red = Pixel.R / 255.0f; float Green = Pixel.G / 255.0f; float Blue = Pixel.B / 255.0f; Red = (((Red - 0.5f) * Value) + 0.5f) * 255.0f; Green = (((Green - 0.5f) * Value) + 0.5f) * 255.0f; Blue = (((Blue - 0.5f) * Value) + 0.5f) * 255.0f; NewData.SetPixel(x, y, Color.FromArgb(((int)Red).Clamp(255, 0), ((int)Green).Clamp(255, 0), ((int)Blue).Clamp(255, 0)), NewPixelSize); } }) Link To Source File
Craig-s-Utility-Library BitmapExtensions.cs Parallel.For(0, 256, x => { Ramp[x] = ((int)((255.0 * System.Math.Pow(x / 255.0, 1.0 / Value)) + 0.5)).Clamp(255, 0); }) Link To Source File
Craig-s-Utility-Library BitmapExtensions.cs Parallel.For(0, Width, x => { for (int y = 0; y < Height; ++y) { Color Pixel = OldData.GetPixel(x, y, OldPixelSize); int Red = Ramp[Pixel.R]; int Green = Ramp[Pixel.G]; int Blue = Ramp[Pixel.B]; NewData.SetPixel(x, y, Color.FromArgb(Red, Green, Blue), NewPixelSize); } }) Link To Source File
Craig-s-Utility-Library BitmapExtensions.cs Parallel.For(0, Width, x => { for (int y = 0; y < Height; ++y) { Color Pixel1 = OldData1.GetPixel(x, y, OldPixelSize1); Color Pixel2 = OldData2.GetPixel(x, y, OldPixelSize2); NewData.SetPixel(x, y, Color.FromArgb(Pixel1.R & Pixel2.R, Pixel1.G & Pixel2.G, Pixel1.B & Pixel2.B), NewPixelSize); } }) Link To Source File
Craig-s-Utility-Library BitmapExtensions.cs Parallel.For(0, Width, x => { for (int y = 0; y < Height; ++y) { int ColorUsing = OldData.GetPixel(x, y, OldPixelSize).R; NewData.SetPixel(x, y, Colors[ColorUsing], NewPixelSize); } }) Link To Source File
Craig-s-Utility-Library BitmapExtensions.cs Parallel.For(0, Width, x => { for (int y = 0; y < Height; ++y) { int RValue = 0; int GValue = 0; int BValue = 0; for (int x2 = ApetureMin; x2 < ApetureMax; ++x2) { int TempX = x + x2; if (TempX >= 0 && TempX < Width) { for (int y2 = ApetureMin; y2 < ApetureMax; ++y2) { int TempY = y + y2; if (TempY >= 0 && TempY < Height) { Color TempColor = OldData.GetPixel(TempX, TempY, OldPixelSize); RValue = RValue.Max(TempColor.R); GValue = GValue.Max(TempColor.G); BValue = BValue.Max(TempColor.B); } } } } Color TempPixel = Color.FromArgb(RValue, GValue, BValue); NewData.SetPixel(x, y, TempPixel, NewPixelSize); } }) Link To Source File
Craig-s-Utility-Library BitmapExtensions.cs Parallel.For(0, Width, x => { for (int y = 0; y < Height; ++y) { Color CurrentColor = OldData.GetPixel(x, y, OldPixelSize); if (y < Height - 1 && x < Width - 1) { Color TempColor = OldData.GetPixel(x + 1, y + 1, OldPixelSize); if (Distance(CurrentColor.R, TempColor.R, CurrentColor.G, TempColor.G, CurrentColor.B, TempColor.B) > Threshold) NewData.SetPixel(x, y, EdgeColor, NewPixelSize); } else if (y < Height - 1) { Color TempColor = OldData.GetPixel(x, y + 1, OldPixelSize); if (Distance(CurrentColor.R, TempColor.R, CurrentColor.G, TempColor.G, CurrentColor.B, TempColor.B) > Threshold) NewData.SetPixel(x, y, EdgeColor, NewPixelSize); } else if (x < Width - 1) { Color TempColor = OldData.GetPixel(x + 1, y, OldPixelSize); if (Distance(CurrentColor.R, TempColor.R, CurrentColor.G, TempColor.G, CurrentColor.B, TempColor.B) > Threshold) NewData.SetPixel(x, y, EdgeColor, NewPixelSize); } } }) Link To Source File
Craig-s-Utility-Library BitmapExtensions.cs Parallel.For(0, Width, x => { for (int y = 0; y < Height; ++y) { Color Current = OldData.GetPixel(x, y, OldPixelSize); int NewR = (int)TempHistogram.R[Current.R]; int NewG = (int)TempHistogram.G[Current.G]; int NewB = (int)TempHistogram.B[Current.B]; NewR = NewR.Clamp(255, 0); NewG = NewG.Clamp(255, 0); NewB = NewB.Clamp(255, 0); NewData.SetPixel(x, y, Color.FromArgb(NewR, NewG, NewB), NewPixelSize); } }) Link To Source File
Craig-s-Utility-Library BitmapExtensions.cs Parallel.For(0, Width, x => { for (int y = 0; y < Height; ++y) { int NewX = Random.Random.ThreadSafeNext(-MaxJitter, MaxJitter); int NewY = Random.Random.ThreadSafeNext(-MaxJitter, MaxJitter); NewX += x; NewY += y; NewX = NewX.Clamp(Width - 1, 0); NewY = NewY.Clamp(Height - 1, 0); NewData.SetPixel(x, y, OldData.GetPixel(NewX, NewY, OldPixelSize), NewPixelSize); } }) Link To Source File
Craig-s-Utility-Library BitmapExtensions.cs Parallel.For(0, Width, x => { for (int y = 0; y < Height; ++y) { int[] RValues = { 0, 0, 0, 0 }; int[] GValues = { 0, 0, 0, 0 }; int[] BValues = { 0, 0, 0, 0 }; int[] NumPixels = { 0, 0, 0, 0 }; int[] MaxRValue = { 0, 0, 0, 0 }; int[] MaxGValue = { 0, 0, 0, 0 }; int[] MaxBValue = { 0, 0, 0, 0 }; int[] MinRValue = { 255, 255, 255, 255 }; int[] MinGValue = { 255, 255, 255, 255 }; int[] MinBValue = { 255, 255, 255, 255 }; for (int i = 0; i < 4; ++i) { for (int x2 = ApetureMinX[i]; x2 < ApetureMaxX[i]; ++x2) { int TempX = x + x2; if (TempX >= 0 && TempX < Width) { for (int y2 = ApetureMinY[i]; y2 < ApetureMaxY[i]; ++y2) { int TempY = y + y2; if (TempY >= 0 && TempY < Height) { Color TempColor = OldData.GetPixel(TempX, TempY, OldPixelSize); RValues[i] += TempColor.R; GValues[i] += TempColor.G; BValues[i] += TempColor.B; if (TempColor.R > MaxRValue[i]) MaxRValue[i] = TempColor.R; else if (TempColor.R < MinRValue[i]) MinRValue[i] = TempColor.R; if (TempColor.G > MaxGValue[i]) MaxGValue[i] = TempColor.G; else if (TempColor.G < MinGValue[i]) MinGValue[i] = TempColor.G; if (TempColor.B > MaxBValue[i]) MaxBValue[i] = TempColor.B; else if (TempColor.B < MinBValue[i]) MinBValue[i] = TempColor.B; ++NumPixels[i]; } } } } } int j = 0; int MinDifference = 10000; for (int i = 0; i < 4; ++i) { int CurrentDifference = (MaxRValue[i] - MinRValue[i]) + (MaxGValue[i] - MinGValue[i]) + (MaxBValue[i] - MinBValue[i]); if (CurrentDifference < MinDifference && NumPixels[i] > 0) { j = i; MinDifference = CurrentDifference; } } Color MeanPixel = Color.FromArgb(RValues[j] / NumPixels[j], GValues[j] / NumPixels[j], BValues[j] / NumPixels[j]); NewData.SetPixel(x, y, MeanPixel, NewPixelSize); } }) Link To Source File
Craig-s-Utility-Library BitmapExtensions.cs Parallel.For(0, Width, x => { for (int y = 0; y < Height; ++y) { List<int> RValues = new List<int>(); List<int> GValues = new List<int>(); List<int> BValues = new List<int>(); for (int x2 = ApetureMin; x2 < ApetureMax; ++x2) { int TempX = x + x2; if (TempX >= 0 && TempX < Width) { for (int y2 = ApetureMin; y2 < ApetureMax; ++y2) { int TempY = y + y2; if (TempY >= 0 && TempY < Height) { Color TempColor = OldData.GetPixel(TempX, TempY, OldPixelSize); RValues.Add(TempColor.R); GValues.Add(TempColor.G); BValues.Add(TempColor.B); } } } } Color MedianPixel = Color.FromArgb(RValues.Median(), GValues.Median(), BValues.Median()); NewData.SetPixel(x, y, MedianPixel, NewPixelSize); } }) Link To Source File
Craig-s-Utility-Library BitmapExtensions.cs Parallel.For(0, Width, x => { for (int y = 0; y < Height; ++y) { Color CurrentPixel = OldData.GetPixel(x, y, OldPixelSize); Color TempValue = Color.FromArgb(255 - CurrentPixel.R, 255 - CurrentPixel.G, 255 - CurrentPixel.B); NewData.SetPixel(x, y, TempValue, NewPixelSize); } }) Link To Source File
Craig-s-Utility-Library BitmapExtensions.cs Parallel.For(0, Width, x => { for (int y = 0; y < Height; ++y) { Color Pixel1 = OldData1.GetPixel(x, y, OldPixelSize1); Color Pixel2 = OldData2.GetPixel(x, y, OldPixelSize2); NewData.SetPixel(x, y, Color.FromArgb(Pixel1.R | Pixel2.R, Pixel1.G | Pixel2.G, Pixel1.B | Pixel2.B), NewPixelSize); } }) Link To Source File
Craig-s-Utility-Library BitmapExtensions.cs Parallel.For(MinX, MaxX, x2 => { for (int y2 = MinY; y2 < MaxY; ++y2) { NewData.SetPixel(x2, y2, TempPixel, NewPixelSize); } }) Link To Source File
Craig-s-Utility-Library BitmapExtensions.cs Parallel.For(0, Width, x => { for (int y = 0; y < Height; ++y) { double Value1 = 0; double Value2 = 0; if (YDirection) Value1 = System.Math.Sin(((x * Frequency) * System.Math.PI) / 180.0d) * Amplitude; if (XDirection) Value2 = System.Math.Sin(((y * Frequency) * System.Math.PI) / 180.0d) * Amplitude; Value1 = y - (int)Value1; Value2 = x - (int)Value2; while (Value1 < 0) Value1 += Height; while (Value2 < 0) Value2 += Width; while (Value1 >= Height) Value1 -= Height; while (Value2 >= Width) Value2 -= Width; NewData.SetPixel(x, y, OldData.GetPixel((int)Value2, (int)Value1, OldPixelSize), NewPixelSize); } }) Link To Source File
Craig-s-Utility-Library BitmapExtensions.cs Parallel.For(0, Width, x => { for (int y = 0; y < Height; ++y) { Color Pixel1 = OldData1.GetPixel(x, y, OldPixelSize1); Color Pixel2 = OldData2.GetPixel(x, y, OldPixelSize2); NewData.SetPixel(x, y, Color.FromArgb((Pixel1.R + Pixel2.R).Clamp(255, 0), (Pixel1.G + Pixel2.G).Clamp(255, 0), (Pixel1.B + Pixel2.B).Clamp(255, 0)), NewPixelSize); } }) Link To Source File
Craig-s-Utility-Library BitmapExtensions.cs Parallel.For(0, Width, x => { for (int y = 0; y < Height; ++y) { int RValue = 0; int GValue = 0; int BValue = 0; int NumPixels = 0; for (int x2 = ApetureMinX; x2 < ApetureMaxX; ++x2) { int TempX1 = x + x2; int TempX2 = x - x2; if (TempX1 >= 0 && TempX1 < Width && TempX2 >= 0 && TempX2 < Width) { for (int y2 = ApetureMinY; y2 < ApetureMaxY; ++y2) { int TempY1 = y + y2; int TempY2 = y - y2; if (TempY1 >= 0 && TempY1 < Height && TempY2 >= 0 && TempY2 < Height) { Color TempColor = OldData.GetPixel(x, y, OldPixelSize); Color TempColor2 = OldData.GetPixel(TempX1, TempY1, OldPixelSize); Color TempColor3 = OldData.GetPixel(TempX2, TempY2, OldPixelSize); if (Distance(TempColor.R, TempColor2.R, TempColor.G, TempColor2.G, TempColor.B, TempColor2.B) < Distance(TempColor.R, TempColor3.R, TempColor.G, TempColor3.G, TempColor.B, TempColor3.B)) { RValue += TempColor2.R; GValue += TempColor2.G; BValue += TempColor2.B; } else { RValue += TempColor3.R; GValue += TempColor3.G; BValue += TempColor3.B; } ++NumPixels; } } } } Color MeanPixel = Color.FromArgb(RValue / NumPixels, GValue / NumPixels, BValue / NumPixels); NewData.SetPixel(x, y, MeanPixel, NewPixelSize); } }) Link To Source File
Craig-s-Utility-Library BitmapExtensions.cs Parallel.For(0, Width, x => { for (int y = 0; y < Height; ++y) { Color CurrentPixel = OldData.GetPixel(x, y, OldPixelSize); Color TempValue = Color.FromArgb(Map(CurrentPixel.R, MinValue.R, MaxValue.R), Map(CurrentPixel.G, MinValue.G, MaxValue.G), Map(CurrentPixel.B, MinValue.B, MaxValue.B)); NewData.SetPixel(x, y, TempValue, NewPixelSize); } }) Link To Source File
Craig-s-Utility-Library BitmapExtensions.cs Parallel.For(0, Width, x => { for (int y = 0; y < Height; ++y) { Color TempColor = OldData.GetPixel(x, y, OldPixelSize); if ((TempColor.R + TempColor.G + TempColor.B) / 755.0f > Threshold) NewData.SetPixel(x, y, Color.White, NewPixelSize); else NewData.SetPixel(x, y, Color.Black, NewPixelSize); } }) Link To Source File
Craig-s-Utility-Library BitmapExtensions.cs Parallel.For(0, Height, y => { for (int x = 0; x < Width; ++x) { float XDistortion = x + (GetHeight(x, y, XNoiseData, XNoisePixelSize) * Power); float YDistortion = y + (GetHeight(x, y, YNoiseData, YNoisePixelSize) * Power); int X1 = ((int)XDistortion).Clamp(Width - 1, 0); int Y1 = ((int)YDistortion).Clamp(Height - 1, 0); ReturnData.SetPixel(x, y, OriginalData.GetPixel(X1, Y1, OriginalPixelSize), ReturnPixelSize); } }) Link To Source File
Craig-s-Utility-Library BitmapExtensions.cs Parallel.For(0, Width, x => { for (int y = 0; y < Height; ++y) { Color Pixel1 = OldData1.GetPixel(x, y, OldPixelSize1); Color Pixel2 = OldData2.GetPixel(x, y, OldPixelSize2); NewData.SetPixel(x, y, Color.FromArgb(Pixel1.R ^ Pixel2.R, Pixel1.G ^ Pixel2.G, Pixel1.B ^ Pixel2.B), NewPixelSize); } }) Link To Source File
Craig-s-Utility-Library Filter.cs Parallel.For(0, Width2, x => { for (int y = 0; y < Height2; ++y) { int RValue = 0; int GValue = 0; int BValue = 0; int Weight = 0; int XCurrent = -Width / 2; for (int x2 = 0; x2 < Width; ++x2) { if (XCurrent + x < Width2 && XCurrent + x >= 0) { int YCurrent = -Height / 2; for (int y2 = 0; y2 < Height; ++y2) { if (YCurrent + y < Height2 && YCurrent + y >= 0) { Color Pixel = OldData.GetPixel(XCurrent + x, YCurrent + y, OldPixelSize); RValue += MyFilter[x2, y2] * Pixel.R; GValue += MyFilter[x2, y2] * Pixel.G; BValue += MyFilter[x2, y2] * Pixel.B; Weight += MyFilter[x2, y2]; } ++YCurrent; } } ++XCurrent; } Color MeanPixel = OldData.GetPixel(x, y, OldPixelSize); if (Weight == 0) Weight = 1; if (Weight > 0) { if (Absolute) { RValue = System.Math.Abs(RValue); GValue = System.Math.Abs(GValue); BValue = System.Math.Abs(BValue); } RValue = (RValue / Weight) + Offset; RValue = RValue.Clamp(255, 0); GValue = (GValue / Weight) + Offset; GValue = GValue.Clamp(255, 0); BValue = (BValue / Weight) + Offset; BValue = BValue.Clamp(255, 0); MeanPixel = Color.FromArgb(RValue, GValue, BValue); } NewData.SetPixel(x, y, MeanPixel, NewPixelSize); } }) Link To Source File
Craig-s-Utility-Library MotionDetection.cs Parallel.For(0, Width, x => { for (int y = 0; y < Height; ++y) { Color NewPixel = NewImage2Data.GetPixel(x, y, NewImage2PixelSize); Color OldPixel = OldImage2Data.GetPixel(x, y, OldImage2PixelSize); if (System.Math.Pow((double)(NewPixel.R - OldPixel.R), 2.0) > Threshold) { OverlayData.SetPixel(x, y, Color.FromArgb(100, 0, 100), OverlayPixelSize); } else { OverlayData.SetPixel(x, y, Color.FromArgb(200, 0, 200), OverlayPixelSize); } } }) Link To Source File
Craig-s-Utility-Library MotionDetection.cs Parallel.For(0, Width, x => { for (int y = 0; y < Height; ++y) { Color Pixel1 = Overlay2Data.GetPixel(x, y, Overlay2PixelSize); if (Pixel1.R != DetectionColor.R || Pixel1.G != DetectionColor.G || Pixel1.B != DetectionColor.B) { Overlay2Data.SetPixel(x, y, Color.FromArgb(200, 0, 200), Overlay2PixelSize); } } }) Link To Source File
Craig-s-Utility-Library NormalMap.cs Parallel.For(0, Height, y => { Math.Vector3 TempVector = new Utilities.Math.Vector3(0.0, 0.0, 0.0); for (int x = 0; x < Width; ++x) { Color TempPixelX = TempImageXData.GetPixel(x, y, TempImageXPixelSize); Color TempPixelY = TempImageYData.GetPixel(x, y, TempImageYPixelSize); TempVector.X = (double)(TempPixelX.R) / 255.0; TempVector.Y = (double)(TempPixelY.R) / 255.0; TempVector.Z = 1.0; TempVector.Normalize(); TempVector.X = ((TempVector.X + 1.0) / 2.0) * 255.0; TempVector.Y = ((TempVector.Y + 1.0) / 2.0) * 255.0; TempVector.Z = ((TempVector.Z + 1.0) / 2.0) * 255.0; ReturnImageData.SetPixel(x, y, Color.FromArgb((int)TempVector.X, (int)TempVector.Y, (int)TempVector.Z), ReturnImagePixelSize); } }) Link To Source File
Craig-s-Utility-Library OilPainting.cs Parallel.For(0, _NumberOfPoints, i => { int Red = 0; int Green = 0; int Blue = 0; int Counter = 0; for (int x = 0; x < Width; ++x) { for (int y = 0; y < Height; ++y) { if (Map.ClosestPoint[x, y] == i) { Color Pixel = ImageData.GetPixel(x, y, ImagePixelSize); Red += Pixel.R; Green += Pixel.G; Blue += Pixel.B; ++Counter; } } } int Counter2 = 0; for (int x = 0; x < Width; ++x) { for (int y = 0; y < Height; ++y) { if (Map.ClosestPoint[x, y] == i) { ImageData.SetPixel(x, y, Color.FromArgb(Red / Counter, Green / Counter, Blue / Counter), ImagePixelSize); ++Counter2; if (Counter2 == Counter) break; } } if (Counter2 == Counter) break; } }) Link To Source File
Craig-s-Utility-Library IEnumerableExtensions.cs Parallel.For(Start, End + 1, new Action<int>(x => Action(List.ElementAt(x)))) Link To Source File
Craig-s-Utility-Library IEnumerableExtensions.cs Parallel.For(Start, End + 1, new Action<int>(x => Results[x - Start] = Function(List.ElementAt(x)))) Link To Source File
Craig-s-Utility-Library Filter.cs Parallel.For(0, Width2, x => { for (int y = 0; y < Height2; ++y) { int RValue = 0; int GValue = 0; int BValue = 0; int Weight = 0; int XCurrent = -Width / 2; for (int x2 = 0; x2 < Width; ++x2) { if (XCurrent + x < Width2 && XCurrent + x >= 0) { int YCurrent = -Height / 2; for (int y2 = 0; y2 < Height; ++y2) { if (YCurrent + y < Height2 && YCurrent + y >= 0) { Color Pixel = OldData.GetPixel(XCurrent + x, YCurrent + y, OldPixelSize); RValue += MyFilter[x2, y2] * Pixel.R; GValue += MyFilter[x2, y2] * Pixel.G; BValue += MyFilter[x2, y2] * Pixel.B; Weight += MyFilter[x2, y2]; } ++YCurrent; } } ++XCurrent; } Color MeanPixel = OldData.GetPixel(x, y, OldPixelSize); if (Weight == 0) Weight = 1; if (Weight > 0) { if (Absolute) { RValue = System.Math.Abs(RValue); GValue = System.Math.Abs(GValue); BValue = System.Math.Abs(BValue); } RValue = (RValue / Weight) + Offset; RValue = RValue.Clamp(255, 0); GValue = (GValue / Weight) + Offset; GValue = GValue.Clamp(255, 0); BValue = (BValue / Weight) + Offset; BValue = BValue.Clamp(255, 0); MeanPixel = Color.FromArgb(RValue, GValue, BValue); } NewData.SetPixel(x, y, MeanPixel, NewPixelSize); } }) Link To Source File
Craig-s-Utility-Library MotionDetection.cs Parallel.For(0, Width, x => { for (int y = 0; y < Height; ++y) { Color NewPixel = NewImage2Data.GetPixel(x, y, NewImage2PixelSize); Color OldPixel = OldImage2Data.GetPixel(x, y, OldImage2PixelSize); if (System.Math.Pow((double)(NewPixel.R - OldPixel.R), 2.0) > Threshold) { OverlayData.SetPixel(x, y, Color.FromArgb(100, 0, 100), OverlayPixelSize); } else { OverlayData.SetPixel(x, y, Color.FromArgb(200, 0, 200), OverlayPixelSize); } } }) Link To Source File
Craig-s-Utility-Library MotionDetection.cs Parallel.For(0, Width, x => { for (int y = 0; y < Height; ++y) { Color Pixel1 = Overlay2Data.GetPixel(x, y, Overlay2PixelSize); if (Pixel1.R != DetectionColor.R || Pixel1.G != DetectionColor.G || Pixel1.B != DetectionColor.B) { Overlay2Data.SetPixel(x, y, Color.FromArgb(200, 0, 200), Overlay2PixelSize); } } }) Link To Source File
Craig-s-Utility-Library NormalMap.cs Parallel.For(0, Height, y => { Math.Vector3 TempVector = new Utilities.Math.Vector3(0.0, 0.0, 0.0); for (int x = 0; x < Width; ++x) { Color TempPixelX = TempImageXData.GetPixel(x, y, TempImageXPixelSize); Color TempPixelY = TempImageYData.GetPixel(x, y, TempImageYPixelSize); TempVector.X = (double)(TempPixelX.R) / 255.0; TempVector.Y = (double)(TempPixelY.R) / 255.0; TempVector.Z = 1.0; TempVector.Normalize(); TempVector.X = ((TempVector.X + 1.0) / 2.0) * 255.0; TempVector.Y = ((TempVector.Y + 1.0) / 2.0) * 255.0; TempVector.Z = ((TempVector.Z + 1.0) / 2.0) * 255.0; ReturnImageData.SetPixel(x, y, Color.FromArgb((int)TempVector.X, (int)TempVector.Y, (int)TempVector.Z), ReturnImagePixelSize); } }) Link To Source File
Craig-s-Utility-Library OilPainting.cs Parallel.For(0, _NumberOfPoints, i => { int Red = 0; int Green = 0; int Blue = 0; int Counter = 0; for (int x = 0; x < Width; ++x) { for (int y = 0; y < Height; ++y) { if (Map.ClosestPoint[x, y] == i) { Color Pixel = ImageData.GetPixel(x, y, ImagePixelSize); Red += Pixel.R; Green += Pixel.G; Blue += Pixel.B; ++Counter; } } } int Counter2 = 0; for (int x = 0; x < Width; ++x) { for (int y = 0; y < Height; ++y) { if (Map.ClosestPoint[x, y] == i) { ImageData.SetPixel(x, y, Color.FromArgb(Red / Counter, Green / Counter, Blue / Counter), ImagePixelSize); ++Counter2; if (Counter2 == Counter) break; } } if (Counter2 == Counter) break; } }) Link To Source File
Craig-s-Utility-Library BitmapExtensions.cs Parallel.For(0, Width, x => { for (int y = 0; y < Height; ++y) { Color CurrentPixel = OldData.GetPixel(x, y, OldPixelSize); int R = CurrentPixel.R + Random.Random.ThreadSafeNext(-Amount, Amount + 1); int G = CurrentPixel.G + Random.Random.ThreadSafeNext(-Amount, Amount + 1); int B = CurrentPixel.B + Random.Random.ThreadSafeNext(-Amount, Amount + 1); R = R.Clamp(255, 0); G = G.Clamp(255, 0); B = B.Clamp(255, 0); Color TempValue = Color.FromArgb(R, G, B); NewData.SetPixel(x, y, TempValue, NewPixelSize); } }) Link To Source File
Craig-s-Utility-Library BitmapExtensions.cs Parallel.For(0, Width, x => { for (int y = 0; y < Height; ++y) { Color Pixel = OldData.GetPixel(x, y, OldPixelSize); float Red = Pixel.R / 255.0f; float Green = Pixel.G / 255.0f; float Blue = Pixel.B / 255.0f; Red = (((Red - 0.5f) * Value) + 0.5f) * 255.0f; Green = (((Green - 0.5f) * Value) + 0.5f) * 255.0f; Blue = (((Blue - 0.5f) * Value) + 0.5f) * 255.0f; NewData.SetPixel(x, y, Color.FromArgb(((int)Red).Clamp(255, 0), ((int)Green).Clamp(255, 0), ((int)Blue).Clamp(255, 0)), NewPixelSize); } }) Link To Source File
Craig-s-Utility-Library BitmapExtensions.cs Parallel.For(0, 256, x => { Ramp[x] = ((int)((255.0 * System.Math.Pow(x / 255.0, 1.0 / Value)) + 0.5)).Clamp(255, 0); }) Link To Source File
Craig-s-Utility-Library BitmapExtensions.cs Parallel.For(0, Width, x => { for (int y = 0; y < Height; ++y) { Color Pixel = OldData.GetPixel(x, y, OldPixelSize); int Red = Ramp[Pixel.R]; int Green = Ramp[Pixel.G]; int Blue = Ramp[Pixel.B]; NewData.SetPixel(x, y, Color.FromArgb(Red, Green, Blue), NewPixelSize); } }) Link To Source File
Craig-s-Utility-Library BitmapExtensions.cs Parallel.For(0, Width, x => { for (int y = 0; y < Height; ++y) { Color Pixel1 = OldData1.GetPixel(x, y, OldPixelSize1); Color Pixel2 = OldData2.GetPixel(x, y, OldPixelSize2); NewData.SetPixel(x, y, Color.FromArgb(Pixel1.R & Pixel2.R, Pixel1.G & Pixel2.G, Pixel1.B & Pixel2.B), NewPixelSize); } }) Link To Source File
Craig-s-Utility-Library BitmapExtensions.cs Parallel.For(0, Width, x => { for (int y = 0; y < Height; ++y) { int ColorUsing = OldData.GetPixel(x, y, OldPixelSize).R; NewData.SetPixel(x, y, Colors[ColorUsing], NewPixelSize); } }) Link To Source File
Craig-s-Utility-Library BitmapExtensions.cs Parallel.For(0, Width, x => { for (int y = 0; y < Height; ++y) { int RValue = 0; int GValue = 0; int BValue = 0; for (int x2 = ApetureMin; x2 < ApetureMax; ++x2) { int TempX = x + x2; if (TempX >= 0 && TempX < Width) { for (int y2 = ApetureMin; y2 < ApetureMax; ++y2) { int TempY = y + y2; if (TempY >= 0 && TempY < Height) { Color TempColor = OldData.GetPixel(TempX, TempY, OldPixelSize); RValue = RValue.Max(TempColor.R); GValue = GValue.Max(TempColor.G); BValue = BValue.Max(TempColor.B); } } } } Color TempPixel = Color.FromArgb(RValue, GValue, BValue); NewData.SetPixel(x, y, TempPixel, NewPixelSize); } }) Link To Source File
Craig-s-Utility-Library BitmapExtensions.cs Parallel.For(0, Width, x => { for (int y = 0; y < Height; ++y) { Color CurrentColor = OldData.GetPixel(x, y, OldPixelSize); if (y < Height - 1 && x < Width - 1) { Color TempColor = OldData.GetPixel(x + 1, y + 1, OldPixelSize); if (Distance(CurrentColor.R, TempColor.R, CurrentColor.G, TempColor.G, CurrentColor.B, TempColor.B) > Threshold) NewData.SetPixel(x, y, EdgeColor, NewPixelSize); } else if (y < Height - 1) { Color TempColor = OldData.GetPixel(x, y + 1, OldPixelSize); if (Distance(CurrentColor.R, TempColor.R, CurrentColor.G, TempColor.G, CurrentColor.B, TempColor.B) > Threshold) NewData.SetPixel(x, y, EdgeColor, NewPixelSize); } else if (x < Width - 1) { Color TempColor = OldData.GetPixel(x + 1, y, OldPixelSize); if (Distance(CurrentColor.R, TempColor.R, CurrentColor.G, TempColor.G, CurrentColor.B, TempColor.B) > Threshold) NewData.SetPixel(x, y, EdgeColor, NewPixelSize); } } }) Link To Source File
Craig-s-Utility-Library BitmapExtensions.cs Parallel.For(0, Width, x => { for (int y = 0; y < Height; ++y) { Color Current = OldData.GetPixel(x, y, OldPixelSize); int NewR = (int)TempHistogram.R[Current.R]; int NewG = (int)TempHistogram.G[Current.G]; int NewB = (int)TempHistogram.B[Current.B]; NewR = NewR.Clamp(255, 0); NewG = NewG.Clamp(255, 0); NewB = NewB.Clamp(255, 0); NewData.SetPixel(x, y, Color.FromArgb(NewR, NewG, NewB), NewPixelSize); } }) Link To Source File
Craig-s-Utility-Library BitmapExtensions.cs Parallel.For(0, Width, x => { for (int y = 0; y < Height; ++y) { int NewX = Random.Random.ThreadSafeNext(-MaxJitter, MaxJitter); int NewY = Random.Random.ThreadSafeNext(-MaxJitter, MaxJitter); NewX += x; NewY += y; NewX = NewX.Clamp(Width - 1, 0); NewY = NewY.Clamp(Height - 1, 0); NewData.SetPixel(x, y, OldData.GetPixel(NewX, NewY, OldPixelSize), NewPixelSize); } }) Link To Source File
Craig-s-Utility-Library BitmapExtensions.cs Parallel.For(0, Width, x => { for (int y = 0; y < Height; ++y) { int[] RValues = { 0, 0, 0, 0 }; int[] GValues = { 0, 0, 0, 0 }; int[] BValues = { 0, 0, 0, 0 }; int[] NumPixels = { 0, 0, 0, 0 }; int[] MaxRValue = { 0, 0, 0, 0 }; int[] MaxGValue = { 0, 0, 0, 0 }; int[] MaxBValue = { 0, 0, 0, 0 }; int[] MinRValue = { 255, 255, 255, 255 }; int[] MinGValue = { 255, 255, 255, 255 }; int[] MinBValue = { 255, 255, 255, 255 }; for (int i = 0; i < 4; ++i) { for (int x2 = ApetureMinX[i]; x2 < ApetureMaxX[i]; ++x2) { int TempX = x + x2; if (TempX >= 0 && TempX < Width) { for (int y2 = ApetureMinY[i]; y2 < ApetureMaxY[i]; ++y2) { int TempY = y + y2; if (TempY >= 0 && TempY < Height) { Color TempColor = OldData.GetPixel(TempX, TempY, OldPixelSize); RValues[i] += TempColor.R; GValues[i] += TempColor.G; BValues[i] += TempColor.B; if (TempColor.R > MaxRValue[i]) MaxRValue[i] = TempColor.R; else if (TempColor.R < MinRValue[i]) MinRValue[i] = TempColor.R; if (TempColor.G > MaxGValue[i]) MaxGValue[i] = TempColor.G; else if (TempColor.G < MinGValue[i]) MinGValue[i] = TempColor.G; if (TempColor.B > MaxBValue[i]) MaxBValue[i] = TempColor.B; else if (TempColor.B < MinBValue[i]) MinBValue[i] = TempColor.B; ++NumPixels[i]; } } } } } int j = 0; int MinDifference = 10000; for (int i = 0; i < 4; ++i) { int CurrentDifference = (MaxRValue[i] - MinRValue[i]) + (MaxGValue[i] - MinGValue[i]) + (MaxBValue[i] - MinBValue[i]); if (CurrentDifference < MinDifference && NumPixels[i] > 0) { j = i; MinDifference = CurrentDifference; } } Color MeanPixel = Color.FromArgb(RValues[j] / NumPixels[j], GValues[j] / NumPixels[j], BValues[j] / NumPixels[j]); NewData.SetPixel(x, y, MeanPixel, NewPixelSize); } }) Link To Source File
Craig-s-Utility-Library BitmapExtensions.cs Parallel.For(0, Width, x => { for (int y = 0; y < Height; ++y) { List<int> RValues = new List<int>(); List<int> GValues = new List<int>(); List<int> BValues = new List<int>(); for (int x2 = ApetureMin; x2 < ApetureMax; ++x2) { int TempX = x + x2; if (TempX >= 0 && TempX < Width) { for (int y2 = ApetureMin; y2 < ApetureMax; ++y2) { int TempY = y + y2; if (TempY >= 0 && TempY < Height) { Color TempColor = OldData.GetPixel(TempX, TempY, OldPixelSize); RValues.Add(TempColor.R); GValues.Add(TempColor.G); BValues.Add(TempColor.B); } } } } Color MedianPixel = Color.FromArgb(RValues.Median(), GValues.Median(), BValues.Median()); NewData.SetPixel(x, y, MedianPixel, NewPixelSize); } }) Link To Source File
Craig-s-Utility-Library BitmapExtensions.cs Parallel.For(0, Width, x => { for (int y = 0; y < Height; ++y) { Color CurrentPixel = OldData.GetPixel(x, y, OldPixelSize); Color TempValue = Color.FromArgb(255 - CurrentPixel.R, 255 - CurrentPixel.G, 255 - CurrentPixel.B); NewData.SetPixel(x, y, TempValue, NewPixelSize); } }) Link To Source File
Craig-s-Utility-Library BitmapExtensions.cs Parallel.For(0, Width, x => { for (int y = 0; y < Height; ++y) { Color Pixel1 = OldData1.GetPixel(x, y, OldPixelSize1); Color Pixel2 = OldData2.GetPixel(x, y, OldPixelSize2); NewData.SetPixel(x, y, Color.FromArgb(Pixel1.R | Pixel2.R, Pixel1.G | Pixel2.G, Pixel1.B | Pixel2.B), NewPixelSize); } }) Link To Source File
Craig-s-Utility-Library BitmapExtensions.cs Parallel.For(MinX, MaxX, x2 => { for (int y2 = MinY; y2 < MaxY; ++y2) { NewData.SetPixel(x2, y2, TempPixel, NewPixelSize); } }) Link To Source File
Craig-s-Utility-Library BitmapExtensions.cs Parallel.For(0, Width, x => { for (int y = 0; y < Height; ++y) { double Value1 = 0; double Value2 = 0; if (YDirection) Value1 = System.Math.Sin(((x * Frequency) * System.Math.PI) / 180.0d) * Amplitude; if (XDirection) Value2 = System.Math.Sin(((y * Frequency) * System.Math.PI) / 180.0d) * Amplitude; Value1 = y - (int)Value1; Value2 = x - (int)Value2; while (Value1 < 0) Value1 += Height; while (Value2 < 0) Value2 += Width; while (Value1 >= Height) Value1 -= Height; while (Value2 >= Width) Value2 -= Width; NewData.SetPixel(x, y, OldData.GetPixel((int)Value2, (int)Value1, OldPixelSize), NewPixelSize); } }) Link To Source File
Craig-s-Utility-Library BitmapExtensions.cs Parallel.For(0, Width, x => { for (int y = 0; y < Height; ++y) { Color Pixel1 = OldData1.GetPixel(x, y, OldPixelSize1); Color Pixel2 = OldData2.GetPixel(x, y, OldPixelSize2); NewData.SetPixel(x, y, Color.FromArgb((Pixel1.R + Pixel2.R).Clamp(255, 0), (Pixel1.G + Pixel2.G).Clamp(255, 0), (Pixel1.B + Pixel2.B).Clamp(255, 0)), NewPixelSize); } }) Link To Source File
Craig-s-Utility-Library BitmapExtensions.cs Parallel.For(0, Width, x => { for (int y = 0; y < Height; ++y) { int RValue = 0; int GValue = 0; int BValue = 0; int NumPixels = 0; for (int x2 = ApetureMinX; x2 < ApetureMaxX; ++x2) { int TempX1 = x + x2; int TempX2 = x - x2; if (TempX1 >= 0 && TempX1 < Width && TempX2 >= 0 && TempX2 < Width) { for (int y2 = ApetureMinY; y2 < ApetureMaxY; ++y2) { int TempY1 = y + y2; int TempY2 = y - y2; if (TempY1 >= 0 && TempY1 < Height && TempY2 >= 0 && TempY2 < Height) { Color TempColor = OldData.GetPixel(x, y, OldPixelSize); Color TempColor2 = OldData.GetPixel(TempX1, TempY1, OldPixelSize); Color TempColor3 = OldData.GetPixel(TempX2, TempY2, OldPixelSize); if (Distance(TempColor.R, TempColor2.R, TempColor.G, TempColor2.G, TempColor.B, TempColor2.B) < Distance(TempColor.R, TempColor3.R, TempColor.G, TempColor3.G, TempColor.B, TempColor3.B)) { RValue += TempColor2.R; GValue += TempColor2.G; BValue += TempColor2.B; } else { RValue += TempColor3.R; GValue += TempColor3.G; BValue += TempColor3.B; } ++NumPixels; } } } } Color MeanPixel = Color.FromArgb(RValue / NumPixels, GValue / NumPixels, BValue / NumPixels); NewData.SetPixel(x, y, MeanPixel, NewPixelSize); } }) Link To Source File
Craig-s-Utility-Library BitmapExtensions.cs Parallel.For(0, Width, x => { for (int y = 0; y < Height; ++y) { Color CurrentPixel = OldData.GetPixel(x, y, OldPixelSize); Color TempValue = Color.FromArgb(Map(CurrentPixel.R, MinValue.R, MaxValue.R), Map(CurrentPixel.G, MinValue.G, MaxValue.G), Map(CurrentPixel.B, MinValue.B, MaxValue.B)); NewData.SetPixel(x, y, TempValue, NewPixelSize); } }) Link To Source File
Craig-s-Utility-Library BitmapExtensions.cs Parallel.For(0, Width, x => { for (int y = 0; y < Height; ++y) { Color TempColor = OldData.GetPixel(x, y, OldPixelSize); if ((TempColor.R + TempColor.G + TempColor.B) / 755.0f > Threshold) NewData.SetPixel(x, y, Color.White, NewPixelSize); else NewData.SetPixel(x, y, Color.Black, NewPixelSize); } }) Link To Source File
Craig-s-Utility-Library BitmapExtensions.cs Parallel.For(0, Height, y => { for (int x = 0; x < Width; ++x) { float XDistortion = x + (GetHeight(x, y, XNoiseData, XNoisePixelSize) * Power); float YDistortion = y + (GetHeight(x, y, YNoiseData, YNoisePixelSize) * Power); int X1 = ((int)XDistortion).Clamp(Width - 1, 0); int Y1 = ((int)YDistortion).Clamp(Height - 1, 0); ReturnData.SetPixel(x, y, OriginalData.GetPixel(X1, Y1, OriginalPixelSize), ReturnPixelSize); } }) Link To Source File
Craig-s-Utility-Library BitmapExtensions.cs Parallel.For(0, Width, x => { for (int y = 0; y < Height; ++y) { Color Pixel1 = OldData1.GetPixel(x, y, OldPixelSize1); Color Pixel2 = OldData2.GetPixel(x, y, OldPixelSize2); NewData.SetPixel(x, y, Color.FromArgb(Pixel1.R ^ Pixel2.R, Pixel1.G ^ Pixel2.G, Pixel1.B ^ Pixel2.B), NewPixelSize); } }) Link To Source File
Par2NET FastCRC32.cs //for (uint i = 0; i <= 255; i++) //{ // uint crc = crcTable[i]; // for (uint j = 0; j < window; j++) // { // crc = ((crc >> 8) & 0x00ffffff) ^ crcTable[(byte)crc]; // } // windowTable[i] = crc; //} Parallel.For(0, 255, i => { uint crc = crcTable[i]; for (uint j = 0; j < window; j++) { crc = ((crc >> 8) & 0x00ffffff) ^ crcTable[(byte)crc]; } windowTable[i] = crc; }) Link To Source File
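Note that Parallel.For treats its second argument as an exclusive upper bound, so the row above runs indices 0 through 254, one fewer than the commented-out sequential loop (i <= 255) it replaces. A sketch of an equivalent parallel loop covering all 256 window-table entries, reusing the row's own variables:

// The sequential loop iterated i = 0..255 inclusive, so the exclusive bound must be 256.
Parallel.For(0, 256, i =>
{
    uint crc = crcTable[i];
    for (uint j = 0; j < window; j++)
        crc = ((crc >> 8) & 0x00ffffff) ^ crcTable[(byte)crc];
    windowTable[i] = crc;
});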
sie MeasurementChainSimulatorTest.cs Parallel.For(0, NSimulations, _parallelOptions, i => { contributions[i] = _derivativesCalculator.Calculate(_chainSimulator.SimulatedChains[i], _model, true, true); }) Link To Source File
sie ConditionalChainSimulatorTest.cs Parallel.For(0, NSimulations, _parallelOptions, i => { contributions[i] = _derivativesCalculator.Calculate(_chainSimulator.SimulatedChains[i], _model, true, true); }) Link To Source File
CSEuler Program.cs Parallel.For(0, games, delegate(int i) //for (int i = 0; i < games; i++) { int colin = roll(6, 6); int peter = roll(4, 9); if (colin > peter) { colin_wins++; } else if (peter > colin) { peter_wins++; } } ) Link To Source File
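The row above increments colin_wins and peter_wins from many concurrent iterations with plain ++, so updates can be lost. A hedged sketch of the same loop with atomic counters (assuming the counters are int locals or fields visible to the lambda; roll() would also need to be thread-safe if it shares a Random instance):

// Requires using System.Threading; Interlocked.Increment makes each count update atomic.
Parallel.For(0, games, i =>
{
    int colin = roll(6, 6);
    int peter = roll(4, 9);
    if (colin > peter) Interlocked.Increment(ref colin_wins);
    else if (peter > colin) Interlocked.Increment(ref peter_wins);
});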
CSEuler Program.cs Parallel.For(sq_min, sq_max, delegate(long i) //for(long i = sq_min; i <= sq_max; i++) { BigInteger temp = i; BigInteger value = temp * temp; // lazy //char[] test = value.ToString().ToCharArray(); //if (test.Length == 19 && test[0] == '1' && test[2] == '2' && test[4] == '3' && test[6] == '4' && test[8] == '5' && test[10] == '6' && test[12] == '7' && test[14] == '8' && test[16] == '9') if(isMatch(value)) { Console.WriteLine("Answer = " + temp + " is " + value); } } ) Link To Source File
CSEuler Problem27.cs Parallel.For(-999, 999, a => { int nplus = max_n + 1; // test to make sure the equation has at least 1 more than the maximum N found if (prog.isPrime((nplus * nplus) + (a * nplus) + b)) { int n = 0; while (prog.isPrime((n * n) + (a * n) + b)) { n++; } if (n - 1 >= max_n) { max_n = n - 1; // lock max_n Console.WriteLine("a: " + a + " b: " + b + " n=" + (n - 1) + " product is " + (a * b)); } } }) Link To Source File
Art MipMap.cs Parallel.For (0, tres, new ParallelOptions { MaxDegreeOfParallelism = cores }, t => { Parallel.For (0, sPow2, new ParallelOptions { MaxDegreeOfParallelism = cores }, s => { // Compute texel $(s,t)$ in $s$-zoomed image resampledImage[t * sPow2 + s] = default (T); for (int j = 0; j < 4; ++j) { int origS = sWeights[s].FirstTexel + j; if (wrapMode == ImageWrap.Repeat) origS = Util.Mod (origS, sres); else if (wrapMode == ImageWrap.Clamp) origS = Util.Clamp (origS, 0, sres - 1); if (origS >= 0 && origS < (int)sres) { dynamic a = sWeights[s].Weight[j]; dynamic b = img[t * sres + origS]; dynamic val = a * b; resampledImage[t * sPow2 + s] += val; } } }); }) Link To Source File
Art MipMap.cs Parallel.For (0, sPow2, new ParallelOptions { MaxDegreeOfParallelism = cores }, s => { // Compute texel $(s,t)$ in $s$-zoomed image resampledImage[t * sPow2 + s] = default (T); for (int j = 0; j < 4; ++j) { int origS = sWeights[s].FirstTexel + j; if (wrapMode == ImageWrap.Repeat) origS = Util.Mod (origS, sres); else if (wrapMode == ImageWrap.Clamp) origS = Util.Clamp (origS, 0, sres - 1); if (origS >= 0 && origS < (int)sres) { dynamic a = sWeights[s].Weight[j]; dynamic b = img[t * sres + origS]; dynamic val = a * b; resampledImage[t * sPow2 + s] += val; } } }) Link To Source File
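The first MipMap row nests one Parallel.For inside another, each capped with MaxDegreeOfParallelism. Nested parallel loops multiply scheduling overhead; a common alternative is to flatten the two dimensions into a single index range and parallelize once. A sketch under that assumption (ResampleTexel is a hypothetical stand-in for the per-texel body):

// Flatten a tres x sPow2 grid into one range instead of nesting Parallel.For.
Parallel.For(0, tres * sPow2, new ParallelOptions { MaxDegreeOfParallelism = cores }, k =>
{
    int t = k / sPow2;   // row index
    int s = k % sPow2;   // column index
    ResampleTexel(t, s); // hypothetical helper holding the inner-loop body from the row above
});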
Art MipMap.cs Parallel.For (0, newres, new ParallelOptions { MaxDegreeOfParallelism = Api.NumberOfCores }, i => { var center = (i + .5) * oldres / newres; if (wt[i] == null) wt[i] = new ResampleWeight (); wt[i].FirstTexel = Util.Floor2Int ((center - filterWidth) + .5); for (var j = 0; j < 4; ++j) { var pos = wt[i].FirstTexel + j + .5; wt[i].Weight[j] = Util.Lanczos ((pos - center) / filterWidth); } var invSumWts = 1.0 / wt[i].Weight.Sum (); wt[i].Weight = wt[i].Weight.Select (x => x * invSumWts).ToArray (); }) Link To Source File
Art SampledSpectrum.cs Parallel.For (0, NumberOfSpectralSamples, new ParallelOptions { MaxDegreeOfParallelism = Api.NumberOfCores }, i => { var wl0 = Util.Lerp ((double)i / (double)(NumberOfSpectralSamples), sampledLambdaStart, sampledLambdaEnd); var wl1 = Util.Lerp ((double)(i + 1) / (double)(NumberOfSpectralSamples), sampledLambdaStart, sampledLambdaEnd); X.c[i] = Spectrum.AverageSpectrumSamples (SpectrumCIE.CIE_Lambda, SpectrumCIE.CIE_X, SpectrumCIE.nCIESamples, wl0, wl1); Y.c[i] = Spectrum.AverageSpectrumSamples (SpectrumCIE.CIE_Lambda, SpectrumCIE.CIE_Y, SpectrumCIE.nCIESamples, wl0, wl1); Z.c[i] = Spectrum.AverageSpectrumSamples (SpectrumCIE.CIE_Lambda, SpectrumCIE.CIE_Z, SpectrumCIE.nCIESamples, wl0, wl1); yint += Y.c[i]; }) Link To Source File
Art SampledSpectrum.cs Parallel.For (0, NumberOfSpectralSamples, new ParallelOptions { MaxDegreeOfParallelism = Api.NumberOfCores }, i => { var wl0 = Util.Lerp ((double)(i) / (double)(NumberOfSpectralSamples), sampledLambdaStart, sampledLambdaEnd); var wl1 = Util.Lerp ((double)(i + 1) / (double)(NumberOfSpectralSamples), sampledLambdaStart, sampledLambdaEnd); rgbRefl2SpectWhite.c[i] = Spectrum.AverageSpectrumSamples (SpectrumCIE.RGB2SpectLambda, SpectrumCIE.RGBRefl2SpectWhite, nRGB2SpectSamples, wl0, wl1); rgbRefl2SpectCyan.c[i] = Spectrum.AverageSpectrumSamples (SpectrumCIE.RGB2SpectLambda, SpectrumCIE.RGBRefl2SpectCyan, nRGB2SpectSamples, wl0, wl1); rgbRefl2SpectMagenta.c[i] = Spectrum.AverageSpectrumSamples (SpectrumCIE.RGB2SpectLambda, SpectrumCIE.RGBRefl2SpectMagenta, nRGB2SpectSamples, wl0, wl1); rgbRefl2SpectYellow.c[i] = Spectrum.AverageSpectrumSamples (SpectrumCIE.RGB2SpectLambda, SpectrumCIE.RGBRefl2SpectYellow, nRGB2SpectSamples, wl0, wl1); rgbRefl2SpectRed.c[i] = Spectrum.AverageSpectrumSamples (SpectrumCIE.RGB2SpectLambda, SpectrumCIE.RGBRefl2SpectRed, nRGB2SpectSamples, wl0, wl1); rgbRefl2SpectGreen.c[i] = Spectrum.AverageSpectrumSamples (SpectrumCIE.RGB2SpectLambda, SpectrumCIE.RGBRefl2SpectGreen, nRGB2SpectSamples, wl0, wl1); rgbRefl2SpectBlue.c[i] = Spectrum.AverageSpectrumSamples (SpectrumCIE.RGB2SpectLambda, SpectrumCIE.RGBRefl2SpectBlue, nRGB2SpectSamples, wl0, wl1); rgbIllum2SpectWhite.c[i] = Spectrum.AverageSpectrumSamples (SpectrumCIE.RGB2SpectLambda, SpectrumCIE.RGBIllum2SpectWhite, nRGB2SpectSamples, wl0, wl1); rgbIllum2SpectCyan.c[i] = Spectrum.AverageSpectrumSamples (SpectrumCIE.RGB2SpectLambda, SpectrumCIE.RGBIllum2SpectCyan, nRGB2SpectSamples, wl0, wl1); rgbIllum2SpectMagenta.c[i] = Spectrum.AverageSpectrumSamples (SpectrumCIE.RGB2SpectLambda, SpectrumCIE.RGBIllum2SpectMagenta, nRGB2SpectSamples, wl0, wl1); rgbIllum2SpectYellow.c[i] = Spectrum.AverageSpectrumSamples (SpectrumCIE.RGB2SpectLambda, SpectrumCIE.RGBIllum2SpectYellow, nRGB2SpectSamples, wl0, wl1); rgbIllum2SpectRed.c[i] = Spectrum.AverageSpectrumSamples (SpectrumCIE.RGB2SpectLambda, SpectrumCIE.RGBIllum2SpectRed, nRGB2SpectSamples, wl0, wl1); rgbIllum2SpectGreen.c[i] = Spectrum.AverageSpectrumSamples (SpectrumCIE.RGB2SpectLambda, SpectrumCIE.RGBIllum2SpectGreen, nRGB2SpectSamples, wl0, wl1); rgbIllum2SpectBlue.c[i] = Spectrum.AverageSpectrumSamples (SpectrumCIE.RGB2SpectLambda, SpectrumCIE.RGBIllum2SpectBlue, nRGB2SpectSamples, wl0, wl1); }) Link To Source File
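The first SampledSpectrum row accumulates yint += Y.c[i] from parallel iterations without synchronization, which can drop contributions. One hedged fix is the Parallel.For overload with per-partition local state, sketched here with the row's own names:

double yint = 0.0;
object sync = new object();
Parallel.For(0, NumberOfSpectralSamples,
    () => 0.0,                                   // localInit: each partition starts its own partial sum
    (i, state, local) =>
    {
        // compute X.c[i], Y.c[i], Z.c[i] exactly as in the row above, then accumulate locally:
        return local + Y.c[i];
    },
    local => { lock (sync) yint += local; });    // localFinally: merge once per partition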
KMLib RBFEvaluator.cs Parallel.For(0, elements.Length, i => { //for (int i = 0; i < elements.Length; i++) //{ float x1Squere = elements[i].DotProduct();// linKernel.Product(elements[i], elements[i]);//linKernel.DiagonalDotCache[i]; float sum = 0; int index = -1; for (int k = 0; k < TrainedModel.SupportElementsIndexes.Length; k++) { //support vector squere float x2Squere = linKernel.DiagonalDotCache[k]; float dot = linKernel.Product(elements[i], TrainedModel.SupportElements[k]); float rbfVal = (float)Math.Exp(-gamma * (x1Squere + x2Squere - 2 * dot)); index = TrainedModel.SupportElementsIndexes[k]; sum += TrainedModel.Alpha[index] * TrainedModel.Y[k] * rbfVal; } sum -= TrainedModel.Bias; predictions[i] = sum < 0 ? -1 : 1; } ) Link To Source File
KMLib SequentialEvaluator.cs Parallel.For(0, elements.Length, i => { predictions[i] = Predict(elements[i]); }) Link To Source File
KMLib BBLinSolver.cs Parallel.For(0, grad.Length, i => { }) Link To Source File
KMLib ParallelSmoFanSolver.cs // Parallel.For(0, active_size, () => new Pair>(-INF, new Pair(-1, INF)), (j, loopState, maxMinPair) => { if (y[j] == +1) { if (!is_lower_bound(j)) { float grad_diff = GMax + G[j]; if (G[j] >= maxMinPair.First) maxMinPair.First = G[j]; if (grad_diff > 0) { float obj_diff; float quad_coef = (float)(Q_i[i] + QD[j] - 2.0 * y[i] * Q_i[j]); if (quad_coef > 0) obj_diff = -(grad_diff * grad_diff) / quad_coef; else obj_diff = (float)(-(grad_diff * grad_diff) / 1e-12); if (obj_diff < maxMinPair.Second.Second) { maxMinPair.Second.First = j; maxMinPair.Second.Second = obj_diff; } } } } else { if (!is_upper_bound(j)) { float grad_diff = GMax - G[j]; if (-G[j] >= maxMinPair.First) maxMinPair.First = -G[j]; if (grad_diff > 0) { float obj_diff; float quad_coef = (float)(Q_i[i] + QD[j] + 2.0 * y[i] * Q_i[j]); if (quad_coef > 0) obj_diff = -(grad_diff * grad_diff) / quad_coef; else obj_diff = (float)(-(grad_diff * grad_diff) / 1e-12); if (obj_diff < maxMinPair.Second.Second) { maxMinPair.Second.First = j; maxMinPair.Second.Second = obj_diff; } } } } //if (maxMinPair.Second.First == -1) // return null; return maxMinPair; }, (maxMinPair) => { if (maxMinPair != null && maxMinPair.Second.First != -1) lock (lockObj) { if (GMax2Tmp < maxMinPair.First) GMax2Tmp = maxMinPair.First; minIdx.Add(maxMinPair.Second.First, maxMinPair.Second.Second); } } ) Link To Source File
KMLib CudaVectorEvaluator.cs Parallel.For(0,TrainedModel.SupportElementsIndexes.Length, i=>{ int idx = TrainedModel.SupportElementsIndexes[i]; svLabels[i] = TrainedModel.Y[i]; //svLabels[i] = TrainningProblem.Labels[idx]; svAlphas[i] = TrainedModel.Alpha[idx]; }) Link To Source File
EngineNine PathGridBuilder.cs Parallel.For(0, height, y => //for (int y = 0; y < height; y++) { var rayPicks = new List(); for (int x = 0; x < width; x++) { var pickRay = new Ray(); pickRay.Position.X = bounds.Min.X + x * step; pickRay.Position.Y = bounds.Min.Y + y * step; pickRay.Position.Z = bounds.Max.Z; pickRay.Direction.Z = -1; lock (SyncRoot) { scene.FindAll(ref pickRay, rayPicks); } float min = float.MaxValue; foreach (var pick in rayPicks) { if (!(pick.OriginalTarget is DrawableSurfacePatch)) continue; var geometry = pick.OriginalTarget as IGeometry; if (geometry != null) { int i = 0; lock (SyncRoot) { // Force lazy initialization; i += geometry.Indices.Length; i += geometry.Positions.Length; var pickResult = pickRay.Intersects(geometry); if (pickResult.HasValue && pickResult.Value < min) { min = pickResult.Value; heightmap[x + y * width] = pickRay.Position.Z - min; } } } } rayPicks.Clear(); } //} }) Link To Source File
EngineNine PathGridBuilder.cs Parallel.For(0, heightmap.Height, y => //for (int y = 0; y < heightmap.Height; y++) { for (int x = 0; x < heightmap.Width; x++) { var p1 = heights[x + y * width]; var p2 = heights[x + 1 + y * width]; var p3 = heights[x + (y + 1) * width]; var p4 = heights[x + 1 + (y + 1) * width]; collisions[x + y * width] = (Math.Abs(p1 - p2) > maxHeightDif || Math.Abs(p1 - p3) > maxHeightDif || Math.Abs(p2 - p4) > maxHeightDif || Math.Abs(p3 - p4) > maxHeightDif || Math.Abs(p1 - p4) > maxHeightDif * sqrtTwo || Math.Abs(p2 - p3) > maxHeightDif * sqrtTwo); } //} }) Link To Source File
EngineNine PathGridBuilder.cs Parallel.For(0, heightmap.Height, y => //for (int y = 0; y < heightmap.Height; y++) { var obstacles = new List(); for (int x = 0; x < heightmap.Width; x++) { var index = x + y * width; if (collisionMap[index]) continue; var p1 = heights[x + y * width]; var p2 = heights[x + 1 + y * width]; var p3 = heights[x + (y + 1) * width]; var p4 = heights[x + 1 + (y + 1) * width]; var boundingBox = new BoundingBox(); boundingBox.Min.Z = (p1 + p2 + p3 + p4) * 0.25f; boundingBox.Max.Z = boundingBox.Min.Z + maxActorHeight; boundingBox.Min.X = bounds.Min.X + x * step; boundingBox.Min.Y = bounds.Min.Y + y * step; boundingBox.Max.X = boundingBox.Min.X + step; boundingBox.Max.Y = boundingBox.Min.Y + step; lock (SyncRoot) { scene.FindAll(ref boundingBox, obstacles); } foreach (IGeometry geometry in obstacles) { int i = 0; lock (SyncRoot) { // Force lazy initialization; i += geometry.Indices.Length; i += geometry.Positions.Length; var containment = boundingBox.Contains(geometry); if (containment != ContainmentType.Disjoint) { collisionMap[index] = true; break; } } } obstacles.Clear(); } //} }) Link To Source File
EngineNine Builder.cs Parallel.For(0, height, y => //for (int y = 0; y < height; y++) { var rayPicks = new List(); for (int x = 0; x < width; x++) { var pickRay = new Ray(); pickRay.Position.X = bounds.Min.X + x * step; pickRay.Position.Y = bounds.Min.Y + y * step; pickRay.Position.Z = bounds.Max.Z; pickRay.Direction.Z = -1; lock (SyncRoot) { scene.FindAll(ref pickRay, rayPicks); } float min = float.MaxValue; foreach (var pick in rayPicks) { if (!(pick.OriginalTarget is DrawableSurfacePatch)) continue; var geometry = pick.OriginalTarget as IGeometry; if (geometry != null) { int i = 0; lock (SyncRoot) { // Force lazy initialization; i += geometry.Indices.Length; i += geometry.Positions.Length; var pickResult = pickRay.Intersects(geometry); if (pickResult.HasValue && pickResult.Value < min) { min = pickResult.Value; heightField[x + y * width] = pickRay.Position.Z - min; } } } } rayPicks.Clear(); } //} }) Link To Source File
EngineNine Builder.cs Parallel.For(0, heightmap.Height, y => //for (int y = 0; y < heightmap.Height; y++) { for (int x = 0; x < heightmap.Width; x++) { var p1 = heights[x + y * width]; var p2 = heights[x + 1 + y * width]; var p3 = heights[x + (y + 1) * width]; var p4 = heights[x + 1 + (y + 1) * width]; collisions[x + y * width] = (Math.Abs(p1 - p2) > maxHeightDif || Math.Abs(p1 - p3) > maxHeightDif || Math.Abs(p2 - p4) > maxHeightDif || Math.Abs(p3 - p4) > maxHeightDif || Math.Abs(p1 - p4) > maxHeightDif * sqrtTwo || Math.Abs(p2 - p3) > maxHeightDif * sqrtTwo); } //} }) Link To Source File
EngineNine Builder.cs Parallel.For(0, heightmap.Height, y => //for (int y = 0; y < heightmap.Height; y++) { var obstacles = new List(); for (int x = 0; x < heightmap.Width; x++) { var index = x + y * width; if (collisionMap[index]) continue; var p1 = heights[x + y * width]; var p2 = heights[x + 1 + y * width]; var p3 = heights[x + (y + 1) * width]; var p4 = heights[x + 1 + (y + 1) * width]; var boundingBox = new BoundingBox(); boundingBox.Min.Z = (p1 + p2 + p3 + p4) * 0.25f; boundingBox.Max.Z = boundingBox.Min.Z + maxActorHeight; boundingBox.Min.X = bounds.Min.X + x * step; boundingBox.Min.Y = bounds.Min.Y + y * step; boundingBox.Max.X = boundingBox.Min.X + step; boundingBox.Max.Y = boundingBox.Min.Y + step; lock (SyncRoot) { scene.FindAll(ref boundingBox, obstacles); } foreach (IGeometry geometry in obstacles) { int i = 0; lock (SyncRoot) { // Force lazy initialization; i += geometry.Indices.Length; i += geometry.Positions.Length; var containment = boundingBox.Contains(geometry); if (containment != ContainmentType.Disjoint) { collisionMap[index] = true; break; } } } obstacles.Clear(); } //} }) Link To Source File
FistCore.Lib Default.aspx.cs Parallel.For(0, 100000, x => { CookieUsers cuser = new CookieUsers(Global.ClientPool.ClientList[Global.StatsBucket], "COOKIE_PERO" + x.ToString()); cuser.CookieName = "NOVO_IME"; cuser.CookieValue = "2222"; cuser.NumUsers = x; cuser.Add(); cuser.InSync = false; cuser.Refresh(); cuser.CookieName = "ime " + x.ToString(); cuser.Set(); }) Link To Source File
BarCodeSharp RGBLuminanceSource.cs Parallel.For(0, width, x => { Color c = unsafeBitmap.GetPixel(x, y); luminances[offset + x] = (sbyte)(((int)c.R) << 16 | ((int)c.G) << 8 | ((int)c.B)); }) Link To Source File
nuget CryptoHashProviderTest.cs Parallel.For(0, 10000, ignored => { // Act byte[] actualHash = hashProvider.CalculateHash(testBytes); // Assert Assert.Equal(actualHash, Convert.FromBase64String(expectedHash)); }) Link To Source File
SuperSocket SocketServerTest.cs System.Threading.Tasks.Parallel.For(0, concurrencyCount, i => { if (!RunEchoMessage()) taskEvent.Reset(); semaphore.Release(); }) Link To Source File
SuperSocket UdpSocketServerTest.cs System.Threading.Tasks.Parallel.For(0, concurrencyCount, i => { if (!RunEchoMessage()) taskEvent.Reset(); semaphore.Release(); }) Link To Source File
SuperSocket SocketServerTest.cs System.Threading.Tasks.Parallel.For(0, concurrencyCount, i => { if (!RunEchoMessage()) taskEvent.Reset(); semaphore.Release(); }) Link To Source File
SuperSocket UdpSocketServerTest.cs System.Threading.Tasks.Parallel.For(0, concurrencyCount, i => { if (!RunEchoMessage()) taskEvent.Reset(); semaphore.Release(); }) Link To Source File
ForgeCraft World.cs /*for (int x = -3; x <= 3; x++) { for (int z = -3; z <= 3; z++) { GenerateChunk(x, z); } Server.Log(x + " Row Generated."); }*/ Parallel.For(-3, 3, delegate(int x) { Parallel.For(-3, 3, delegate(int z) { GenerateChunk(x, z); }); Console.WriteLine(x + " Row Generated."); }) Link To Source File
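As with the FastCRC32 row, the commented-out loops here ran x <= 3 and z <= 3 (a 7x7 grid of chunks), while Parallel.For(-3, 3, ...) stops at 2 because the upper bound is exclusive. A sketch that covers the same inclusive range:

// Covering -3..3 inclusive needs 4 as the exclusive bound.
Parallel.For(-3, 4, x =>
{
    Parallel.For(-3, 4, z => GenerateChunk(x, z));
    Console.WriteLine(x + " Row Generated.");
});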
ForgeCraft World.cs Parallel.For(-3, 3, delegate(int z) { GenerateChunk(x, z); }) Link To Source File
Media-Manager FilmSelector.xaml.cs Parallel.For(0, Films.Length, i => //For each link (each different movie) { filmsites[i] = GetHtml("http://www.imdb.com" + links[i]); //First get the HTML of the film page "filmsite" is now a string of entire website //Use GrabImage function to download picture Films[i] = new Film(); Films[i].Name = rgxFindFilmName.Match(filmsites[i].ToString()).Groups[1].Value; //Find the name of the film in the HTML Films[i].Rating = rgxFindRating.Match(filmsites[i].ToString()).Groups[1].Value; //Find the rating of the film in the HTML if ((rgxFindDate.Match(filmsites[i].ToString()).Groups[1].Value) == string.Empty) { Films[i].ReleaseYear = 1000; } else { Films[i].ReleaseYear = Convert.ToInt32(rgxFindDate.Match(filmsites[i].ToString()).Groups[1].Value); //Find the release year of the film in the HTML } List<string> lsGenres = new List<string>(); //Create a list of genres foreach (Match m in rgxFindGenres.Matches(filmsites[i].ToString())) { lsGenres.Add(m.Groups[1].Value); //Foreach genre found add it to the list } for (int inte = 0; inte < lsGenres.Count; inte++) //For each value item of list { for (int x = 0; x < lsGenres.Count; x++) //For each item of list { if (inte != x) //If the items are not the same index { if (lsGenres[inte] == lsGenres[x]) //If they have equal value, but not equal index { lsGenres.RemoveAt(x); //Remove copy } } } } StringBuilder sbGenres = new StringBuilder(); foreach (string s in lsGenres) { sbGenres.Append(s + " "); } Films[i].Genres = sbGenres.ToString(); Films[i].Plot = rgxFindStoryline.Match(filmsites[i].ToString()).Groups[1].Value; } ) Link To Source File
Media-Manager FilmSelector.xaml.cs Parallel.For(0, Films.Length, i => { string ImageURL = rgxGrabPicture.Match(filmsites[i].ToString()).Groups[1].Value; if (string.IsNullOrEmpty(ImageURL)) { ms[i] = null; } else { WebClient wc = new WebClient(); byte[] pic = wc.DownloadData(ImageURL); ms[i] = new MemoryStream(pic); } } ) Link To Source File
NugetCracker CryptoHashProviderTest.cs Parallel.For(0, 10000, ignored => { // Act byte[] actualHash = hashProvider.CalculateHash(testBytes); // Assert Assert.Equal(actualHash, Convert.FromBase64String(expectedHash)); }) Link To Source File
Rebus TestRabbitMqMessageQueue.cs Parallel.For(0, consumers, i => count.Times(() => { var receivedTransportMessage = competingConsumers[i].ReceiveMessage(); if (receivedTransportMessage == null) return; receivedTransportMessage.Data.ShouldBe("w00t!"); Interlocked.Increment(ref receivedMessageCount); })) Link To Source File
OCodeHtm HtmLayer2D.cs Parallel.For(0, Height, row => { Parallel.For(0, Width, col => { NodeArray[row, col].Learn(GetSubMatrixForNodeAt(row, col, input)); }); }) Link To Source File
OCodeHtm HtmLayer2D.cs Parallel.For(0, Width, col => { NodeArray[row, col].Learn(GetSubMatrixForNodeAt(row, col, input)); }) Link To Source File
OCodeHtm HtmLayer2D.cs Parallel.For(0, Height, row => { Parallel.For(0, Width, col => { outputs[row, col] = (Matrix)NodeArray[row, col].Infer(GetSubMatrixForNodeAt(row, col, input)).ToRowMatrix(); }); }) Link To Source File
OCodeHtm HtmLayer2D.cs Parallel.For(0, Width, col => { outputs[row, col] = (Matrix)NodeArray[row, col].Infer(GetSubMatrixForNodeAt(row, col, input)).ToRowMatrix(); }) Link To Source File
OCodeHtm HtmLayer2D.cs Parallel.For(0, Height, row => { Parallel.For(0, Width, col => { lock (output) { output.SetSubMatrix(row, 1, col * MaxNodeOutputSize, outputs[row, col].ColumnCount, outputs[row, col]); } }); }) Link To Source File
OCodeHtm HtmLayer2D.cs Parallel.For(0, Width, col => { lock (output) { output.SetSubMatrix(row, 1, col * MaxNodeOutputSize, outputs[row, col].ColumnCount, outputs[row, col]); } }) Link To Source File
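The OCodeHtm rows above take a lock on the shared output matrix for every single write, even though each (row, col) pair touches a disjoint region. One hedged restructuring, reusing the rows' names, is to stage results per node in parallel and assemble the output sequentially, which removes the lock from the hot loop entirely:

// Parallel phase: each node writes only its own cell of 'outputs'.
Parallel.For(0, Height, row =>
{
    for (int col = 0; col < Width; col++)
        outputs[row, col] = (Matrix)NodeArray[row, col].Infer(GetSubMatrixForNodeAt(row, col, input)).ToRowMatrix();
});
// Sequential phase: copy the staged results into the shared output without locking.
for (int row = 0; row < Height; row++)
    for (int col = 0; col < Width; col++)
        output.SetSubMatrix(row, 1, col * MaxNodeOutputSize, outputs[row, col].ColumnCount, outputs[row, col]);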
OCodeHtm Spatial2DLayer.cs Parallel.For(0, Height, row => { Parallel.For(0, Width, col => { NodeArray[row, col] = CreateNode(); }); }) Link To Source File
OCodeHtm Spatial2DLayer.cs Parallel.For(0, Width, col => { NodeArray[row, col] = CreateNode(); }) Link To Source File
OCodeHtm Spatial2DLayer.cs Parallel.For(0, Height, row => { Parallel.For(0, Width, col => { NodeArray[row, col] = ClonedNode.Clone(); }); }) Link To Source File
OCodeHtm Spatial2DLayer.cs Parallel.For(0, Width, col => { NodeArray[row, col] = ClonedNode.Clone(); }) Link To Source File
Fuzz ArrayExtensions.cs Parallel.For(0, dim1, i => Parallel.For(0, dim2, j => { result[i, j] = transform(self[i, j]); })) Link To Source File
Fuzz ArrayExtensions.cs Parallel.For(0, dim2, j => { result[i, j] = transform(self[i, j]); }) Link To Source File
Fuzz ArrayExtensions.cs Parallel.For(0, dim1, i => Parallel.For(0, dim2, j => { result[i, j] = transform(self[i, j]); })) Link To Source File
Fuzz ArrayExtensions.cs Parallel.For(0, dim2, j => { result[i, j] = transform(self[i, j]); }) Link To Source File
Fuzz ArrayExtensions.cs Parallel.For(0, dim1, i => Parallel.For(0, dim2, j => { result[i, j] = transform(self[i, j]); })) Link To Source File
Fuzz ArrayExtensions.cs Parallel.For(0, dim2, j => { result[i, j] = transform(self[i, j]); }) Link To Source File
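The Fuzz rows wrap nested Parallel.For calls over a 2D array into a map-style helper. A self-contained sketch of such an extension method (hypothetical name and signature, not necessarily the library's actual one); parallelizing only the outer dimension keeps per-iteration overhead low:

using System;
using System.Threading.Tasks;

public static class ArrayParallelExtensions
{
    // Apply 'transform' to every element of a rectangular array in parallel.
    public static TOut[,] Map<TIn, TOut>(this TIn[,] self, Func<TIn, TOut> transform)
    {
        int dim1 = self.GetLength(0), dim2 = self.GetLength(1);
        var result = new TOut[dim1, dim2];
        Parallel.For(0, dim1, i =>
        {
            for (int j = 0; j < dim2; j++)
                result[i, j] = transform(self[i, j]);
        });
        return result;
    }
}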
Fuzz LocallyAdaptiveFuzzyHistogramHiberbolization.cs Parallel.For(0, pixels.GetLength(0), i => Parallel.For(0, pixels.GetLength(1), j => { Tuple minMax = localValues[i, j]; memberships[i, j] = MembershipFunction( pixels[i, j], minMax.Item1, minMax.Item2); } )) Link To Source File
Fuzz LocallyAdaptiveFuzzyHistogramHiberbolization.cs Parallel.For(0, pixels.GetLength(1), j => { Tuple minMax = localValues[i, j]; memberships[i, j] = MembershipFunction( pixels[i, j], minMax.Item1, minMax.Item2); } ) Link To Source File
Fuzz FuzzyClassifierEdgeDetector.cs Parallel.For(1, width - 1, i => Parallel.For(1, height - 1, j => { int[] featureVector = CalculateFeatureVector(i, j, pixels); double edgeMemberhip = EdgedMembership(featureVector); double backgroundMembership = BackgroundMembership(featureVector); if (backgroundMembership >= edgeMemberhip) { result[i, j] = 255; } else { result[i, j] = 0; } })) Link To Source File
Fuzz FuzzyClassifierEdgeDetector.cs Parallel.For(1, height - 1, j => { int[] featureVector = CalculateFeatureVector(i, j, pixels); double edgeMemberhip = EdgedMembership(featureVector); double backgroundMembership = BackgroundMembership(featureVector); if (backgroundMembership >= edgeMemberhip) { result[i, j] = 255; } else { result[i, j] = 0; } }) Link To Source File
OG-DotNet HeartbeatSenderTests.cs Parallel.For(0, 100, _ => CanCreateAndDispose()) Link To Source File
OG-DotNet MemoizerTest.cs Parallel.For(1, repeats, i => ConcurrentAddsReturnSingelton()) Link To Source File
OG-DotNet OpenGammaFudgeContextTests.cs Parallel.For(1, 4 * Environment.ProcessorCount, _ => Thrash(context)) Link To Source File
OG-DotNet RemoteViewClientBatchTests.cs //TODO this is not the right place for this test Parallel.For(1, 20, new ParallelOptions { MaxDegreeOfParallelism = 4 }, delegate(int i) { IViewExecutionOptions req = ExecutionOptions.SingleCycle; var runToCompletion = RunToCompletion(req); Assert.Equal(1, runToCompletion.Item1.Count()); Assert.Equal(1, runToCompletion.Item2.Count()); AssertApproximatelyEqual(req.ExecutionSequence.Next.ValuationTime, runToCompletion.Item2.Single().FullResult. ValuationTime); }) Link To Source File
OG-DotNet RemoteViewCycleTests.cs //Use parallel to fill the thread pool Parallel.For(0, cyclesCount, _ => cycles.Add(remoteViewClient.CreateCycleReference(e.FullResult.ViewCycleId))) Link To Source File
Paralect.ServiceBus ConcurrentTest.cs Parallel.For(0, 15, (i, state) => { var unity = new UnityContainer(); var tracker = new Tracker(); unity.RegisterInstance(tracker); var config = new Configuration(unity) .SetName("PS " + i) .SetInputQueue("PSB.App1.Input") .SetErrorQueue("PSB.App1.Error") .AddEndpoint("Paralect.ServiceBus.Test.Messages", "PSB.App2.Input"); using (var bus = new ServiceBus(config)) { bus.Run(); Thread.Sleep(10); } }) Link To Source File
Authorisation Steps.cs Parallel.For(0, times, i => { var o = TestContext.Store.Operations; }) Link To Source File
DBMulticast MultiCast.cs Parallel.For(0, args.Servers.Count, (j) => { if (queryWorker.CancellationPending) return; var item = args.Servers[j]; var serverID = item.servername + "|" + item.database; var svrstart = DateTime.Now; try { queryWorker.ReportProgress(0, new QueryProgress() { ServerID = serverID, Status = ExecutionStatus.Executing }); using (DataSet _ds = DBAccess.GetDataSet(args.SqlText, DBAccess.GetServerConnectionString(item), timeout, item)) { TimeSpan elapsed = DateTime.Now.Subtract(svrstart); TotalTime += elapsed; if (_ds.Tables.Count == 0) { queryWorker.ReportProgress(0, new QueryProgress() { Message = "Did not return a record set.", ServerID = serverID, Status = ExecutionStatus.Error, Elapsed = elapsed }); } else { if (!args.ReturnText) { queryWorker.ReportProgress(0, new QueryProgress() { ResultData = _ds, ServerID = serverID, Status = ExecutionStatus.Success, Elapsed = elapsed }); } else { var sb = new StringBuilder(_ds.Tables[0].Rows.Count * 10); foreach (DataRow dr in _ds.Tables[0].Rows) { if (queryWorker.CancellationPending) break; sb.Append(dr[1].ToString()); } queryWorker.ReportProgress(0, new QueryProgress() { ResultDataText = sb.ToString(), ServerID = serverID, Status = ExecutionStatus.Success, Elapsed = elapsed }); } } } } catch (Exception ex) { if (ex.Message.Contains("Operation cancelled by user.")) { queryWorker.ReportProgress(0, new QueryProgress() { Message = "Cancelled.", ServerID = serverID, Status = ExecutionStatus.Success, Elapsed = DateTime.Now.Subtract(svrstart) }); } else { queryWorker.ReportProgress(0, new QueryProgress() { Message = ex.Message, ServerID = serverID, Status = ExecutionStatus.Error, Elapsed = DateTime.Now.Subtract(svrstart) }); } } } ) Link To Source File
pubnub Publish.cs Parallel.For(0, 5, (i) => { pn.Publish("csharp_unit_test", new { }); }) Link To Source File
pubnub Subscribe.cs Parallel.For(0, 10, (i) => { lock (sync) list.Add(i, null); // System.Diagnostics.Debug.WriteLine(i); var test = (pn.Publish("csharp_throughput_test", new { ID = i })); if (!test) { lock (sync) list.Remove(i); System.Diagnostics.Debug.WriteLine("Failed: " + i); } }) Link To Source File
IrcSharp IrcServer.cs Parallel.For(0, count, i => { Client client; if (!RecvClientQueue.TryDequeue(out client)) return; if (!client.Running) return; Interlocked.Exchange(ref client.TimesEnqueuedForRecv, 0); ByteQueue bufferToProcess = client.GetBufferToProcess(); string[] commands = bufferToProcess.GetCommands(); if(commands.Length > 1) { for(int u = 0; u < commands.Length - 1; u++) { client.Logger.Log(Logger.LogLevel.Debug, commands[u]); int index = commands[u].IndexOf(' '); var myEnum = (PacketType)Enum.Parse(typeof(PacketType), commands[u].Substring(0,index)); PacketHandler handler = PacketHandlers.GetHandler(myEnum); if(handler != null) handler.OnReceive(client, Encoding.UTF8.GetBytes(commands[u])); else { client.Logger.Log(Logger.LogLevel.Error, "Command unknown: " + commands[u]); } } } }) Link To Source File
IrcSharp IrcServer.cs Parallel.For(0, count, i => { Client client; if (!SendClientQueue.TryDequeue(out client)) return; if (!client.Running) { client.DisposeSendSystem(); return; } client.SendStart(); }) Link To Source File
preprompt-parallel ParallelFor.cs Parallel.For(fromInclusive, toExclusive, () => seed, (i, state, local) => { if (state.IsStopped) { return local; } try { // // Return the current replica's new local state. // return aggregator(local, selector(i)); } catch { // // Exceptions also break loops. // state.Stop(); return local; } }, partial => { lock (aggLock) result = aggregator(partial, result); }) Link To Source File
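The row above uses the Parallel.For overload that threads per-partition local state through localInit, body, and localFinally, so the lock is taken once per worker rather than once per element. A minimal concrete sketch of the same overload summing a hypothetical int[] data array:

long total = 0;
object sync = new object();
Parallel.For(0, data.Length,
    () => 0L,                                   // localInit: each partition starts at zero
    (i, state, local) => local + data[i],       // body: accumulate into the partition-local sum
    local => { lock (sync) total += local; });  // localFinally: merge once per partition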
ENN EvolvingNeuralNetwork.cs Parallel.For(0, topologies.Length, i => { accuracies[i] = TrainNetwork(ref topologies[i]); }) Link To Source File
ENN TreadedHiddenLayer.cs Parallel.For(0, nodes.Length, i => { cValues[i] = nodes[i].GetValue(values); }) Link To Source File
Viola-Jones-Mono StrongClassifier.cs Parallel.For(0, features.Length, (iFeature) => { //for (var iFeature = 0; iFeature < FeaturesList.Length; iFeature++) { if (AvailableMemory() > Config.MinFreeMemory) { var values = FeatureValue.ComputeAllValuesSorted(Tests, features[iFeature]); featuresValues[iFeature] = new FeatureValues(features[iFeature], values); } else featuresValues[iFeature] = new FeatureValues(features[iFeature], null); }) Link To Source File
Viola-Jones-Mono StrongClassifier.cs Parallel.For(0, filenames.Length, (i) => { images[i] = LoadImage(filenames[i]); }) Link To Source File
Raven.Json CloningTests.cs Parallel.For(0, 10000, i => { Assert.True(copy.ContainsKey("@id")); var foo = (RavenJObject)copy.CloneToken(); Assert.True(foo.ContainsKey("@id")); Assert.True(copy.ContainsKey("@id")); }) Link To Source File
ravendb MultiGet.cs Parallel.For(0, requests.Length, position => HandleRequest(requests, results, position, context, ravenHttpConfiguration, contexts) ) Link To Source File
ravendb CloningTests.cs Parallel.For(0, 10000, i => { Assert.True(copy.ContainsKey("@id")); var foo = (RavenJObject)copy.CloneToken(); Assert.True(foo.ContainsKey("@id")); Assert.True(copy.ContainsKey("@id")); }) Link To Source File
ravendb MultiThreadedWrites.cs Parallel.For(0, 10, counter => { Table table = counter % 2 == 0 ? tableOne : tableTwo; ProcessTask(counter, database, table); }) Link To Source File
Queem BenchmarkProvider.cs Parallel.For(0, benchmarks.Count, (i) => { if (verbose) Console.WriteLine("Started test " + i + "..."); benchmarks[i].Run(maxdepth); if (verbose) Console.WriteLine("Test " + i + " finished."); }) Link To Source File
natix LC_PRNN.cs //pops.MaxDegreeOfParallelism = 1; Parallel.For (1, this.MainSpace.Count, classify_object) Link To Source File
DynamicScript ScriptArray.cs Parallel.For(0L, leftLength, i => result[i] = left[new[] { i }, state]) Link To Source File
DynamicScript ScriptArray.cs Parallel.For(0L, rightLength, i => result[i + leftLength] = right[new[] { i }, state]) Link To Source File
DynamicScript Extensions.cs Parallel.For(0, length, i => destination[i + destinationIndex] = source[i + sourceIndex]) Link To Source File
DynamicScript RuntimeHelpers.cs Parallel.For(0, values.Count, i => result[i] = values[i].GetContractBinding()) Link To Source File
DynamicScript ScriptMethod.cs Parallel.For(0, arguments.Length, i => scriptArguments[i] = NativeObject.ConvertFrom(arguments[i], parameterTypes[i])) Link To Source File
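The DynamicScript rows pass 0L as the starting index, which selects the Parallel.For(long, long, Action<long>) overload and allows 64-bit index ranges. A short sketch with hypothetical source and destination arrays:

// The long-index overload: indices are Int64 rather than Int32.
long length = source.LongLength;
Parallel.For(0L, length, i => destination[i] = source[i]);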
Castle.Adapter AdapterAssemblyTests.cs Parallel.For(0, count, i => types[i] = assembly.GetAdapterType(type)) Link To Source File
Tag-Cache CacheStoreTest.cs Parallel.For(1, 100000, delegate(int ndx) { string key = String.Format("cacheItem{0}", ndx); CacheItem cacheItem = new CacheItem(key, null, 0, null); cacheStore.Set(cacheItem); Assert.IsTrue(Object.ReferenceEquals(cacheItem, cacheStore.Get(cacheItem.Key)), "unable to read back value from ICacheStore"); }) Link To Source File
Tag-Cache ScavengedExpirableCacheStoreTest.cs Parallel.For(0, 10, delegate(int ndx) { for (int i = 0; i < 10000; i++) { cacheStore.Set(new CacheItem(String.Format("key_{0}_{1}", ndx, i), null, 0, null)); } }) Link To Source File
Tag-Cache ScavengedExpirableCacheStoreTest.cs Parallel.For(0, 5, (i) => _CacheBombard(cacheStore, new Random(i))) Link To Source File
Tag-Cache TaggedScavengedExpirableCacheStoreTest.cs Parallel.For(0, 5, (i) => _CacheBombard(cacheStore, new Random(i))) Link To Source File
particle_system ISimulationScheme.cs Parallel.For (0, trailBundleCount, bundleIndex => { var particleCount = Position.Length; var trailSize = TrailSize; var firsttrail = bundleIndex * trailBundleSize * trailSize; var lasttrail = Math.Min(firsttrail + trailBundleSize , particleCount); var dt = (float)system.DT; var speedBound = m_SpeedUpperBound; var delta = Vector4.Zero; var size = 0f; for (int j = 0; j < StepsPerFrame; j++) { for (int i = firsttrail ; i < lasttrail ; i += 1) { //i is the trail's first element var pi = i + Meta[i].Leader; Meta[i].Leader = (Meta[i].Leader + trailBundleSize) % (trailSize * trailBundleSize); var ii = i + Meta[i].Leader; if (ii >= particleCount) { ii = i; Meta[i].Leader = 0; } if(MapMode == MapModeType.ForceField) { delta = new Vector4 (Meta[i].Velocity * dt, 0); var dv = (Vector4)fun ((Vector4d) Position[pi]) * dt; Meta[i].Velocity += dv.Xyz; } else { delta = (Vector4)fun ((Vector4d)Position[pi]) * dt; //delta.W = 0; } //Vector4.Add(ref position_old, ref delta, out Position[ii]); size = system.ParticleGenerator.UpdateSize(system, i, ii); Position[ii] = Position[pi] + delta; Dimension[ii] = new Vector4 (size, size, size, size); // var b0 = delta; var b2 = new Vector4( Vector3.Cross( b0.Xyz, Rotation[pi].Row1.Xyz), 0); var b1 = new Vector4( Vector3.Cross( b2.Xyz, b0.Xyz), 0); b0.Normalize(); b1.Normalize(); b2.Normalize(); Rotation[ii] = new Matrix4(b0, b1, b2, new Vector4(0,0,0,1)); // switch (ColorScheme) { case ColorSchemeType.Distance: var speed = delta.LengthFast/ dt; var A = MathHelper2.Clamp (2 * speed / speedBound, 0, 1); speedBound = speedBound < speed? speed: speedBound; Color[ii] = (new Vector4 (1, 0.2f, 0.2f, 1) * A + new Vector4 (0.2f, 1, 0.2f, 1) * (1 - A)); break; case ColorSchemeType.Color: Color[ii] = new Vector4 (0.2f, 1, 0.2f, 1); break; default: break; } // if (j == StepsPerFrame - 1) { if (Meta[i].LifeLen <= 0) system.ParticleGenerator.NewBundle (system, i); else Meta[i].LifeLen--; } } } var orig = m_SpeedUpperBound; var help = orig; while( speedBound > orig && (help = Interlocked.CompareExchange(ref m_SpeedUpperBound, speedBound, orig)) != orig) orig = help; }) Link To Source File
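The tail of the row above publishes each bundle's speed bound into a shared field with an Interlocked.CompareExchange retry loop, a lock-free "update if greater" pattern. A standalone sketch of that pattern (hypothetical UpdateUpperBound helper; requires using System.Threading):

static void UpdateUpperBound(ref float bound, float candidate)
{
    float observed = bound;
    while (candidate > observed)
    {
        float previous = Interlocked.CompareExchange(ref bound, candidate, observed);
        if (previous == observed) break; // our candidate was stored
        observed = previous;             // another thread won; re-check against its value
    }
}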
Rosette ParallelFractalImageProcessor.cs Parallel.For(0, height, y => { Parallel.For(0, width, x => { SetPoint(buffer, fractal, x, y); }); }) Link To Source File
Rosette ParallelFractalImageProcessor.cs Parallel.For(0, width, x => { SetPoint(buffer, fractal, x, y); }) Link To Source File
MembaseManagementClient ManagementClientPoolTests.cs Parallel.For(0, numClients, i => { var currentClient = _pool.GetCurrentClient(); _pool.HandleClientFailure(currentClient); var nextClient = _pool.GetCurrentClient(); Assert.AreNotSame(nextClient, currentClient); }) Link To Source File
SharpBag BagMath.cs Parallel.For(1, sqrt, x => { uint xx = (uint)(x * x); for (uint y = 1; y <= sqrt; y++) { var yy = y * y; var n = 4 * xx + yy; if (n <= max && (n % 12 == 1 || n % 12 == 5)) isPrime[n] ^= true; n = 3 * xx + yy; if (n <= max && n % 12 == 7) isPrime[n] ^= true; n = 3 * xx - yy; if (x > y && n <= max && n % 12 == 11) isPrime[n] ^= true; } }) Link To Source File
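The sieve row above toggles shared isPrime entries with ^= true from multiple threads; since different (x, y) pairs can hit the same n, concurrent toggles can in principle lose an update. One hedged way to keep the parallel outer loop is to count toggles atomically and take the parity afterwards (a sketch reusing the row's variables and assuming max fits an int-addressable array):

var toggles = new int[max + 1];
Parallel.For(1, sqrt, x =>
{
    uint xx = (uint)(x * x);
    for (uint y = 1; y <= sqrt; y++)
    {
        var yy = y * y;
        var n = 4 * xx + yy;
        if (n <= max && (n % 12 == 1 || n % 12 == 5)) Interlocked.Increment(ref toggles[n]);
        n = 3 * xx + yy;
        if (n <= max && n % 12 == 7) Interlocked.Increment(ref toggles[n]);
        n = 3 * xx - yy;
        if (x > y && n <= max && n % 12 == 11) Interlocked.Increment(ref toggles[n]);
    }
});
// Convert toggle counts to booleans by parity, single-threaded.
for (long n = 0; n <= max; n++) isPrime[n] ^= (toggles[n] & 1) == 1;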
snippets ParallelStarter.cs Parallel.For(2, 20, i => { var result = SumRootN(i); Console.WriteLine("root {0} : {1} ", i, result); }) Link To Source File
sharpneat-tutorials ParallelCoevolutionListEvaluator.cs // Exhaustively compete individuals against each other. Parallel.For(0, genomeList.Count, delegate(int i) { for(int j = 0; j < genomeList.Count; j++) { // Don't bother evaluating inviduals against themselves. if (i == j) continue; // Decode the first genome. TPhenome phenome1 = _genomeDecoder.Decode(genomeList[i]); // Check that the first genome is valid. if (phenome1 == null) continue; // Decode the second genome. TPhenome phenome2 = _genomeDecoder.Decode(genomeList[j]); // Check that the second genome is valid. if (phenome2 == null) continue; // Compete the two individuals against each other and get // the results. FitnessInfo fitness1, fitness2; _phenomeEvaluator.Evaluate(phenome1, phenome2, out fitness1, out fitness2); // Add the results to each genome's overall fitness. // Note that we need to use a lock here because // the += operation is not atomic. lock (results) { results[i]._fitness += fitness1._fitness; results[i]._alternativeFitness += fitness1._alternativeFitness; results[j]._fitness += fitness2._fitness; results[j]._alternativeFitness += fitness2._alternativeFitness; } } }) Link To Source File
sharpneat-tutorials ParallelHallOfFameListEvaluator.cs // Exhaustively compete individuals against each other. Parallel.For(0, genomeList.Count, delegate(int i) { // Decode the first genome. TPhenome phenome1 = _genomeDecoder.Decode(genomeList[i]); // Check that the first genome is valid. if (phenome1 == null) return; for (int j = 0; j < _hallOfFame.Count; j++) { // Decode the second genome. TPhenome phenome2 = _genomeDecoder.Decode(_hallOfFame[j]); // Check that the second genome is valid. if (phenome2 == null) continue; // Compete the two individuals against each other and get the results. FitnessInfo fitness1, fitness2; _phenomeEvaluator.Evaluate(phenome1, phenome2, out fitness1, out fitness2); // Add the results to each genome's overall fitness. // Note that we need to use a lock here because // the += operation is not atomic. // ENHANCEMENT: I don't think this lock is necessary here since the hall of fame // is our inner loop. lock (results) { results[i]._fitness += fitness1._fitness * championGameWeight; results[i]._alternativeFitness += fitness1._alternativeFitness * championGameWeight; } } }) Link To Source File
sharpneat-tutorials HostParasiteCoevolutionListEvaluator.cs // Exhaustively compete individuals against each other. Parallel.For(0, hostGenomeList.Count, delegate(int i) { // Decode the host genome. TPhenome host = _genomeDecoder.Decode(hostGenomeList[i]); // Check that the host genome is valid. if (host == null) return; // Evaluate the host against the parasites. for (int j = 0; j < _parasiteGenomes.Count; j++) { // Decode the champion genome. TPhenome parasite = _genomeDecoder.Decode(_parasiteGenomes[j]); // Check that the champion genome is valid. if (parasite == null) continue; // Compete the two individuals against each other and get the results. FitnessInfo hostFitness, parasiteFitness; _phenomeEvaluator.Evaluate(host, parasite, out hostFitness, out parasiteFitness); // Add the results to each genome's overall fitness. results[i]._fitness += hostFitness._fitness; results[i]._alternativeFitness += hostFitness._alternativeFitness; } // Evaluate the host against the champions. for (int j = 0; j < champions.Length; j++) { // Decode the champion genome. TPhenome champion = _genomeDecoder.Decode(champions[j]); // Check that the champion genome is valid. if (champion == null) continue; // Compete the two individuals against each other and get the results. FitnessInfo hostFitness, championFitness; _phenomeEvaluator.Evaluate(host, champion, out hostFitness, out championFitness); // Add the results to each genome's overall fitness. results[i]._fitness += hostFitness._fitness; results[i]._alternativeFitness += hostFitness._alternativeFitness; } }) Link To Source File
Tasks.Show TestUtil.cs System.Threading.Tasks.Parallel.For(0, a.AllFolders.Count, i => { Assert.AreEqual(a.AllFolders[i], b.AllFolders[i]); }) Link To Source File
Tasks.Show TestUtil.cs System.Threading.Tasks.Parallel.For(0, a.Tasks.Count, i => { Assert.IsTrue(ArraysEqual(GetArray(a.Tasks[i]), GetArray(b.Tasks[i]))); }) Link To Source File
GPU.NET-Example-Projects BlackScholes.cs // Loop over the stock data and calculate the call and put prices for each Parallel.For(0, callResult.Length, OptionIndex => { float s = stockPrice[OptionIndex]; float x = optionStrike[OptionIndex]; float t = optionYears[OptionIndex]; // Calculate the square root of the time to option expiration, in years float SqrtT = (float)Math.Sqrt(t); // Calculate the Black-Scholes parameters float d1 = ((float)Math.Log(s / x) + (riskFree + 0.5f * volatility * volatility) * t) / (volatility * SqrtT); float d2 = d1 - volatility * SqrtT; // Plug the parameters into the Cumulative Normal Distribution (CND) float K1 = 1.0f / (1.0f + 0.2316419f * Math.Abs(d1)); float CndD1 = RSQRT2PI * (float)Math.Exp(-0.5f * d1 * d1) * (K1 * (A1 + K1 * (A2 + K1 * (A3 + K1 * (A4 + K1 * A5))))); if (d1 > 0) { CndD1 = 1.0f - CndD1; } float K2 = 1.0f / (1.0f + 0.2316419f * Math.Abs(d2)); float CndD2 = RSQRT2PI * (float)Math.Exp(-0.5f * d2 * d2) * (K2 * (A1 + K2 * (A2 + K2 * (A3 + K2 * (A4 + K2 * A5))))); if (d2 > 0) { CndD2 = 1.0f - CndD2; } // Calculate the discount rate float ExpRT = (float)Math.Exp(-1.0f * riskFree * t); // Calculate the values of the call and put options callResult[OptionIndex] = s * CndD1 - x * ExpRT * CndD2; putResult[OptionIndex] = x * ExpRT * (1.0f - CndD2) - s * (1.0f - CndD1); }) Link To Source File
GPU.NET-Example-Projects BlackScholes.cs // Loop over the stock data and calculate the call and put prices for each Parallel.For(0, callResult.Length, OptionIndex => { float s = stockPrice[OptionIndex]; float x = optionStrike[OptionIndex]; float t = optionYears[OptionIndex]; // Calculate the square root of the time to option expiration, in years float SqrtT = (float)Math.Sqrt(t); // Calculate the Black-Scholes parameters float d1 = ((float)Math.Log(s / x) + (riskFree + 0.5f * volatility * volatility) * t) / (volatility * SqrtT); float d2 = d1 - volatility * SqrtT; // Plug the parameters into the Cumulative Normal Distribution (CND) float K1 = 1.0f / (1.0f + 0.2316419f * Math.Abs(d1)); float CndD1 = RSQRT2PI * (float)Math.Exp(-0.5f * d1 * d1) * (K1 * (A1 + K1 * (A2 + K1 * (A3 + K1 * (A4 + K1 * A5))))); if (d1 > 0) { CndD1 = 1.0f - CndD1; } float K2 = 1.0f / (1.0f + 0.2316419f * Math.Abs(d2)); float CndD2 = RSQRT2PI * (float)Math.Exp(-0.5f * d2 * d2) * (K2 * (A1 + K2 * (A2 + K2 * (A3 + K2 * (A4 + K2 * A5))))); if (d2 > 0) { CndD2 = 1.0f - CndD2; } // Calculate the discount rate float ExpRT = (float)Math.Exp(-1.0f * riskFree * t); // Calculate the values of the call and put options callResult[OptionIndex] = s * CndD1 - x * ExpRT * CndD2; putResult[OptionIndex] = x * ExpRT * (1.0f - CndD2) - s * (1.0f - CndD1); }) Link To Source File
GPU.NET-Example-Projects VectorOps.cs // Loop over the vectors 'a' and 'b', adding them pairwise and storing the sums in 'c' Parallel.For(0, a.Length, ElementIndex => { c[ElementIndex] = a[ElementIndex] + b[ElementIndex]; }) Link To Source File
GPU.NET-Example-Projects VectorOps.cs // Loop over the vectors 'a' and 'b', adding them pairwise and storing the sums in 'c' Parallel.For(0, a.Length, ElementIndex => { c[ElementIndex] = a[ElementIndex] + b[ElementIndex]; }) Link To Source File
GPU.NET-Example-Projects VectorOps.cs // Loop over the vectors 'a' and 'b', adding them pairwise and storing the sums in 'c' Parallel.For(0, a.Length, ElementIndex => { c[ElementIndex] = a[ElementIndex] + b[ElementIndex]; }) Link To Source File
GPU.NET-Example-Projects VectorOps.cs // Loop over the vectors 'a' and 'b', adding them pairwise and storing the sums in 'c' Parallel.For(0, a.Length, ElementIndex => { c[ElementIndex] = a[ElementIndex] + b[ElementIndex]; }) Link To Source File
GPU.NET-Example-Projects VectorOps.cs // Loop over the vectors 'a' and 'b', multiplying them pairwise and storing the products in 'c' Parallel.For(0, a.Length, ElementIndex => { c[ElementIndex] = a[ElementIndex] * b[ElementIndex]; }) Link To Source File
GPU.NET-Example-Projects VectorOps.cs // Loop over the vectors 'a' and 'b', multiplying them pairwise and storing the products in 'c' Parallel.For(0, a.Length, ElementIndex => { c[ElementIndex] = a[ElementIndex] * b[ElementIndex]; }) Link To Source File
GPU.NET-Example-Projects VectorOps.cs // Loop over the vectors 'a', 'b', and 'c'; multiply each pair of elements from 'a' and 'b', add the element from 'c' to the product, then store the result in 'd'. Parallel.For(0, a.Length, ElementIndex => { d[ElementIndex] = (a[ElementIndex] * b[ElementIndex]) + c[ElementIndex]; }) Link To Source File
GPU.NET-Example-Projects VectorOps.cs // Loop over the vectors 'a', 'b', and 'c'; multiply each pair of elements from 'a' and 'b', add the element from 'c' to the product, then store the result in 'd'. Parallel.For(0, a.Length, ElementIndex => { d[ElementIndex] = (a[ElementIndex] * b[ElementIndex]) + c[ElementIndex]; }) Link To Source File
GPU.NET-Example-Projects VectorOps.cs // Loop over the vectors 'a', 'b', and 'c'; multiply each pair of elements from 'a' and 'b', add the element from 'c' to the product, then store the result in 'd'. Parallel.For(0, a.Length, ElementIndex => { // Multiplying two integers could overflow, so we cast them to long *before* the multiplication so we'll get the full product. d[ElementIndex] = ((long)a[ElementIndex] * (long)b[ElementIndex]) + c[ElementIndex]; }) Link To Source File
GPU.NET-Example-Projects VectorOps.cs // Loop over the vectors 'a' and 'b', subtracting them pairwise and storing the sums in 'c' Parallel.For(0, a.Length, ElementIndex => { c[ElementIndex] = a[ElementIndex] - b[ElementIndex]; }) Link To Source File
GPU.NET-Example-Projects VectorOps.cs // Loop over the vectors 'a' and 'b', subtracting them pairwise and storing the sums in 'c' Parallel.For(0, a.Length, ElementIndex => { c[ElementIndex] = a[ElementIndex] - b[ElementIndex]; }) Link To Source File
GPU.NET-Example-Projects VectorOps.cs // Loop over the vectors 'a' and 'b', subtracting them pairwise and storing the sums in 'c' Parallel.For(0, a.Length, ElementIndex => { c[ElementIndex] = a[ElementIndex] - b[ElementIndex]; }) Link To Source File
GPU.NET-Example-Projects VectorOps.cs // Loop over the vectors 'a' and 'b', subtracting them pairwise and storing the sums in 'c' Parallel.For(0, a.Length, ElementIndex => { c[ElementIndex] = a[ElementIndex] - b[ElementIndex]; }) Link To Source File
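The VectorOps rows run a tiny body (a single add, subtract, or multiply) per index, where the per-element delegate call can cost more than the arithmetic. A common mitigation, sketched here with the rows' a, b, c arrays, is range partitioning with Partitioner.Create so each task runs a tight sequential loop over a chunk:

using System.Collections.Concurrent;
using System.Threading.Tasks;

// Each partition receives a (fromInclusive, toExclusive) range and loops over it without further delegate calls.
Parallel.ForEach(Partitioner.Create(0, a.Length), range =>
{
    for (int i = range.Item1; i < range.Item2; i++)
        c[i] = a[i] + b[i];
});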
GPU.NET-Example-Projects AsianOptionPricingEngineCPU.cs // Simulate the paths // NOTE : paths.GetLength(0) is the number of simulations we want to perform. Parallel.For(0, paths.GetLength(0), simulationIndex => { // Handle the path simulation for each simulation; don't nest Parallel.For(...) calls here, // because it'd only help in some special cases where we had a large number of timesteps and very small number of simulations. // Rudimentary -- but fast -- pseudo-RNG. // NOTE : This tends to produce a fairly poor distribution of numbers, so using it speeds up path generation // but slows down the convergence speed of the result. var SimpleRng = new Random(); // Cryptographically-strong -- but slow -- pseudo-RNG. //var CryptoRng = new System.Security.Cryptography.RNGCryptoServiceProvider(); // Compute the random price-path for the current simulation. int NumTimesteps = paths.GetLength(1); float s = 1.0f; for (int TimestepIndex = 0; TimestepIndex < NumTimesteps; TimestepIndex++) { s *= (float)Math.Exp(drift + (diffusion * (float)SimpleRng.NextDoubleNormal())); //s *= (float)Math.Exp(drift + (diffusion * (float)CryptoRng.NextDoubleNormal())); paths[simulationIndex, TimestepIndex] = s; } }) Link To Source File
GPU.NET-Example-Projects AsianOptionPricingEngineCPU.cs // Parallel.For(0, NumSimulations, simulationIndex => { // Compute the arithmatic average price of the option along the simulated path. // This is done by averaging the random change at each step in the path, then multiplying this by the option's spot price (what the option is valued at before traversing the random path). int NumTimesteps = paths.GetLength(1); float PathSum = 0f; for (int TimestepIndex = 0; TimestepIndex < NumTimesteps; TimestepIndex++) { PathSum += paths[simulationIndex, TimestepIndex]; } float avg = PathSum * optionSpotPrice / NumTimesteps; // Compute the payoff and store it into the payoff array. Payoffs[simulationIndex] = Math.Max(0f, optionType == OptionType.Call ? avg - optionStrikePrice : optionStrikePrice - avg); }) Link To Source File
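The path-simulation row above constructs new Random() inside each parallel iteration; instances created in quick succession on older .NET runtimes share time-based seeds and can emit correlated streams (the source's own comment already flags the distribution quality). A hedged sketch of one generator per worker thread via ThreadLocal<Random>, with a hypothetical numSimulations bound:

using System;
using System.Threading;
using System.Threading.Tasks;

int seedSource = Environment.TickCount;
var rng = new ThreadLocal<Random>(() => new Random(Interlocked.Increment(ref seedSource)));

Parallel.For(0, numSimulations, simulationIndex =>
{
    Random r = rng.Value;           // thread-affine instance; safe without locking
    double sample = r.NextDouble(); // draw per-step samples here, as the row above does per timestep
});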
dwarrowdelf RenderViewSimple.cs // Note: we cannot access WPF stuff from different threads Parallel.For(0, rows, y => { for (int x = 0; x < columns; ++x) { var p = new IntPoint2(x, y); int idx = m_renderData.GetIdx(x, y); if (m_renderData.Grid[idx].IsValid) continue; var ml = new IntPoint3(offsetX + x, offsetY + (rows - y - 1), offsetZ); Resolve(out m_renderData.Grid[idx], this.Environment, ml, m_showVirtualSymbols, isSeeAll); } }) Link To Source File
dwarrowdelf RenderViewDetailed.cs // Note: we cannot access WPF stuff from different threads Parallel.For(0, rows, y => { int idx = m_renderData.GetIdx(0, y); for (int x = 0; x < columns; ++x, ++idx) { if (m_renderData.Grid[idx].IsValid) continue; var ml = RenderDataLocationToMapLocation(x, y); ResolveDetailed(out m_renderData.Grid[idx], this.Environment, ml, m_showVirtualSymbols, isSeeAll); } }) Link To Source File
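Both dwarrowdelf render views above use the same shape: parallelize over rows, walk the columns sequentially, and let every iteration write only into its own disjoint stripe of a flat grid, so no locking is needed (the WPF objects themselves are only touched later, on the UI thread). A stripped-down sketch of that row-parallel pattern; Cell and Resolve are placeholders, not dwarrowdelf types:

using System.Threading.Tasks;

struct Cell { public bool IsValid; public int Symbol; }

static class RowParallelSketch
{
    // grid is row-major: index = y * columns + x
    public static void Fill(Cell[] grid, int rows, int columns)
    {
        Parallel.For(0, rows, y =>
        {
            int idx = y * columns;            // first cell of this row
            for (int x = 0; x < columns; ++x, ++idx)
            {
                if (grid[idx].IsValid)
                    continue;                 // already resolved earlier
                grid[idx] = Resolve(x, y);    // writes never leave this row
            }
        });
    }

    static Cell Resolve(int x, int y)
    {
        return new Cell { IsValid = true, Symbol = x ^ y };
    }
}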
dwarrowdelf WorldCreator.cs Parallel.For(0, env.Height, y => { for (int x = 0; x < env.Width; ++x) { int surface = heightMap[x, y]; for (int z = 0; z < env.Depth; ++z) { var p = new IntPoint3(x, y, z); var td = new TileData(); td.InteriorID = InteriorID.Empty; td.InteriorMaterialID = MaterialID.Undefined; if (z < surface) { td.TerrainID = TerrainID.NaturalWall; td.TerrainMaterialID = MaterialID.Granite; } else if (z == surface) { td.TerrainID = TerrainID.NaturalFloor; td.TerrainMaterialID = MaterialID.Granite; if (z < GRASS_LIMIT) td.Flags = TileFlags.Grass; } else { td.TerrainID = TerrainID.Empty; td.TerrainMaterialID = MaterialID.Undefined; } env.SetTileData(p, td); } } }) Link To Source File
dwarrowdelf PlayerVision.cs Parallel.For(0, bounds.Height, y => { for (int x = 0; x < bounds.Width; ++x) { var p = new IntPoint3(x, y, z); var vis = EnvironmentHelpers.CanSeeThrough(env, p) || EnvironmentHelpers.CanBeSeen(env, p); if (vis) { lvlIsHidden = false; m_visibilityArray[p.Z, p.Y, p.X] = true; } } }) Link To Source File
xna-morijobi-win my_space.cs // for(var ia = 0; ia < c; ++ia) Parallel.For(0, c, ia => { for (var ib = ia + 1; ib < c; ++ib){ var a = ss[ia]; var b = ss[ib]; if (a.bounding.Intersects(b.bounding)) { //if (a.mass > b.mass) // removes.Enqueue(b); //else // removes.Enqueue(a); removes.Enqueue((a.mass > b.mass) ? b : a); se_collision.Play(); } } }) Link To Source File
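The my_space.cs row replaces the commented-out sequential outer loop with Parallel.For while the inner loop still starts at ia + 1, so each unordered pair of bodies is tested exactly once; collisions found on different threads are funneled into a concurrent queue. A self-contained sketch of that triangular-pair pattern (Body, the 1-D intersection test, and the queue are illustrative):

using System;
using System.Collections.Concurrent;
using System.Threading.Tasks;

class Body { public float X, Radius, Mass; }

static class CollisionSketch
{
    public static ConcurrentQueue<Body> FindLosers(Body[] bodies)
    {
        var removes = new ConcurrentQueue<Body>();

        Parallel.For(0, bodies.Length, ia =>
        {
            for (int ib = ia + 1; ib < bodies.Length; ++ib)
            {
                var a = bodies[ia];
                var b = bodies[ib];
                if (Math.Abs(a.X - b.X) < a.Radius + b.Radius)    // 1-D "Intersects"
                    removes.Enqueue(a.Mass > b.Mass ? b : a);     // the lighter body loses
            }
        });

        return removes;
    }
}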
SourceBit.ClientCompressor FactoryTests.cs Parallel.For(0, 100, (index) => Assert.That(Factory.Container, Is.EqualTo(Factory.Container))) Link To Source File
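The FactoryTests.cs row hammers a lazily-initialized container from many threads at once; comparing Factory.Container against itself mainly checks that concurrent resolution neither throws nor returns different instances between the two reads. A hedged NUnit-style sketch of the slightly stronger form, capturing one reference up front (Factory below is a stand-in, not the SourceBit.ClientCompressor type):

using System.Threading.Tasks;
using NUnit.Framework;

[TestFixture]
public class SingletonConcurrencyTests
{
    static class Factory
    {
        static readonly object Sync = new object();
        static object container;

        public static object Container
        {
            get { lock (Sync) { return container ?? (container = new object()); } }
        }
    }

    [Test]
    public void Container_IsSameInstance_UnderParallelAccess()
    {
        var expected = Factory.Container;               // resolve once on the test thread
        Parallel.For(0, 100, _ =>
            Assert.That(Factory.Container, Is.SameAs(expected)));
    }
}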
movielens Computer.cs Parallel.For(i + 1, this.movies.Count, j => { if (this.movies[i].Id != this.movies[j].Id) { var ret = sim.ItemSimilarityScore(this.movies[i], this.movies[j]); results.Add(new Result { MovieId = this.movies[j].Id, Score = ret }); } }) Link To Source File
MyMediaLite ItemsCrossValidation.cs Parallel.For(0, (int) split.NumberOfFolds, fold => { try { var split_recommender = (ItemRecommender) recommender.Clone(); // avoid changes in recommender split_recommender.Feedback = split.Train[fold]; split_recommender.Train(); var fold_results = Items.Evaluate(split_recommender, split.Train[fold], split.Test[fold], test_users, candidate_items, candidate_item_mode); if (compute_fit) fold_results["fit"] = (float) split_recommender.ComputeFit(); // thread-safe stats lock (avg_results) foreach (var key in fold_results.Keys) if (avg_results.ContainsKey(key)) avg_results[key] += fold_results[key]; else avg_results[key] = fold_results[key]; if (show_results) Console.Error.WriteLine("fold {0} {1}", fold, fold_results); } catch (Exception e) { Console.Error.WriteLine("===> ERROR: " + e.Message + e.StackTrace); throw e; } }) Link To Source File
MyMediaLite ItemsCrossValidation.cs // initial training and evaluation Parallel.For(0, (int) split.NumberOfFolds, i => { try { split_recommenders[i] = (ItemRecommender) recommender.Clone(); // to avoid changes in recommender split_recommenders[i].Feedback = split.Train[i]; split_recommenders[i].Train(); iterative_recommenders[i] = (IIterativeModel) split_recommenders[i]; var fold_results = Items.Evaluate(split_recommenders[i], split.Test[i], split.Train[i], test_users, candidate_items, candidate_item_mode, repeated_events); Console.WriteLine("fold {0} {1} iteration {2}", i, fold_results, iterative_recommenders[i].NumIter); } catch (Exception e) { Console.Error.WriteLine("===> ERROR: " + e.Message + e.StackTrace); throw e; } }) Link To Source File
MyMediaLite ItemsCrossValidation.cs Parallel.For(0, (int) split.NumberOfFolds, i => { try { iterative_recommenders[i].Iterate(); if (it % find_iter == 0) { var fold_results = Items.Evaluate(split_recommenders[i], split.Test[i], split.Train[i], test_users, candidate_items, candidate_item_mode, repeated_events); Console.WriteLine("fold {0} {1} iteration {2}", i, fold_results, it); } } catch (Exception e) { Console.Error.WriteLine("===> ERROR: " + e.Message + e.StackTrace); throw e; } }) Link To Source File
MyMediaLite RatingsCrossValidation.cs Parallel.For(0, (int) split.NumberOfFolds, i => { try { var split_recommender = (RatingPredictor) recommender.Clone(); // to avoid changes in recommender split_recommender.Ratings = split.Train[i]; split_recommender.Train(); var fold_results = Ratings.Evaluate(split_recommender, split.Test[i]); if (compute_fit) fold_results["fit"] = (float) split_recommender.ComputeFit(); // thread-safe stats lock (avg_results) foreach (var key in fold_results.Keys) if (avg_results.ContainsKey(key)) avg_results[key] += fold_results[key]; else avg_results[key] = fold_results[key]; if (show_results) Console.Error.WriteLine("fold {0} {1}", i, fold_results); } catch (Exception e) { Console.Error.WriteLine("===> ERROR: " + e.Message + e.StackTrace); throw e; } }) Link To Source File
MyMediaLite RatingsCrossValidation.cs // initial training and evaluation Parallel.For(0, (int) split.NumberOfFolds, i => { try { split_recommenders[i] = (RatingPredictor) recommender.Clone(); // to avoid changes in recommender split_recommenders[i].Ratings = split.Train[i]; split_recommenders[i].Train(); iterative_recommenders[i] = (IIterativeModel) split_recommenders[i]; var fold_results = Ratings.Evaluate(split_recommenders[i], split.Test[i]); Console.WriteLine("fold {0} {1} iteration {2}", i, fold_results, iterative_recommenders[i].NumIter); } catch (Exception e) { Console.Error.WriteLine("===> ERROR: " + e.Message + e.StackTrace); throw e; } }) Link To Source File
MyMediaLite RatingsCrossValidation.cs Parallel.For(0, (int) split.NumberOfFolds, i => { try { iterative_recommenders[i].Iterate(); if (it % find_iter == 0) { var fold_results = Ratings.Evaluate(split_recommenders[i], split.Test[i]); Console.WriteLine("fold {0} {1} iteration {2}", i, fold_results, it); } } catch (Exception e) { Console.Error.WriteLine("===> ERROR: " + e.Message + e.StackTrace); throw e; } }) Link To Source File
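All the MyMediaLite cross-validation rows share one aggregation idiom: each fold trains and evaluates independently inside Parallel.For, and only the small per-fold metric dictionary is merged under a lock, so the expensive work stays unsynchronized. A reduced sketch of that merge step; the evaluateFold delegate and the final averaging are illustrative, not the library's API:

using System;
using System.Collections.Generic;
using System.Threading.Tasks;

static class CrossValidationSketch
{
    public static Dictionary<string, float> Run(int numFolds, Func<int, Dictionary<string, float>> evaluateFold)
    {
        var avgResults = new Dictionary<string, float>();

        Parallel.For(0, numFolds, fold =>
        {
            var foldResults = evaluateFold(fold);        // training + evaluation, no shared state

            lock (avgResults)                            // only the cheap merge is serialized
            {
                foreach (var pair in foldResults)
                    if (avgResults.ContainsKey(pair.Key))
                        avgResults[pair.Key] += pair.Value;
                    else
                        avgResults[pair.Key] = pair.Value;
            }
        });

        foreach (var key in new List<string>(avgResults.Keys))
            avgResults[key] /= numFolds;                 // turn per-fold sums into averages

        return avgResults;
    }
}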
MyMediaLite BiasedMatrixFactorization.cs Parallel.For(0, MaxThreads, j => Iterate(thread_blocks[j, (i + j) % MaxThreads], true, true)) Link To Source File
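The BiasedMatrixFactorization row is the subtle one: the ratings appear to be partitioned into a MaxThreads x MaxThreads grid of blocks by user block and item block, and at outer step i thread j updates block [j, (i + j) % MaxThreads], so the blocks processed concurrently never share a user block or an item block and the factor updates cannot race. A hedged sketch of that diagonal schedule; the block contents and the update delegate are placeholders, not MyMediaLite's internals:

using System;
using System.Collections.Generic;
using System.Threading.Tasks;

static class BlockScheduleSketch
{
    // blocks[u, v] holds the indices of the ratings whose user falls in
    // user-block u and whose item falls in item-block v.
    public static void OneEpoch(List<int>[,] blocks, int maxThreads, Action<List<int>> updateBlock)
    {
        for (int step = 0; step < maxThreads; step++)
        {
            int i = step;   // capture for the lambda below
            // Threads j = 0..maxThreads-1 touch pairwise distinct user blocks
            // and pairwise distinct item blocks, so their updates never collide.
            Parallel.For(0, maxThreads, j =>
                updateBlock(blocks[j, (i + j) % maxThreads]));
        }
    }
}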