Ramon Béjar před 3 roky
rodič
revize
a251488a81
28 změnil soubory, kde provedl 3038 přidání a 0 odebrání
  1. 44
    0
      distributed_social_network_analyzer/README.md
  2. 10
    0
      distributed_social_network_analyzer/distributed-analyzer.sbt
  3. 3
    0
      distributed_social_network_analyzer/scripts/solveXMLconversation.sh
  4. 17
    0
      distributed_social_network_analyzer/src/main/scala/DownloaderManagers/RedditDownloader.scala
  5. 27
    0
      distributed_social_network_analyzer/src/main/scala/DownloaderManagers/TwitterDownloader.scala
  6. 59
    0
      distributed_social_network_analyzer/src/main/scala/Graph/RedditGraph.scala
  7. 364
    0
      distributed_social_network_analyzer/src/main/scala/Graph/SocialGraph.scala
  8. 148
    0
      distributed_social_network_analyzer/src/main/scala/Graph/TwitterGraph.scala
  9. 78
    0
      distributed_social_network_analyzer/src/main/scala/Graph/TwitterGraphFromXML.scala
  10. 220
    0
      distributed_social_network_analyzer/src/main/scala/GraphCalculous/GraphRepresentations.scala
  11. 264
    0
      distributed_social_network_analyzer/src/main/scala/GraphCalculous/RelationshipsCalcs.scala
  12. 79
    0
      distributed_social_network_analyzer/src/main/scala/Learning/MP/MPClassifier.scala
  13. 101
    0
      distributed_social_network_analyzer/src/main/scala/Learning/MP/MPTraining.scala
  14. 25
    0
      distributed_social_network_analyzer/src/main/scala/Learning/MachineLearning.scala
  15. 134
    0
      distributed_social_network_analyzer/src/main/scala/Learning/OtherModels/DecisionTreeModelCustom.scala
  16. 115
    0
      distributed_social_network_analyzer/src/main/scala/Learning/OtherModels/Examples/ExampleFlights.scala
  17. 111
    0
      distributed_social_network_analyzer/src/main/scala/Learning/OtherModels/NaiveBayesModelCustom.scala
  18. 37
    0
      distributed_social_network_analyzer/src/main/scala/MainApp.scala
  19. 49
    0
      distributed_social_network_analyzer/src/main/scala/MainAppFromXML.scala
  20. 22
    0
      distributed_social_network_analyzer/src/main/scala/ParserCaller/ParserCaller.scala
  21. 226
    0
      distributed_social_network_analyzer/src/main/scala/Tests/BasicTests.scala
  22. 185
    0
      distributed_social_network_analyzer/src/main/scala/Tests/DistributedGraphTests.scala
  23. 46
    0
      distributed_social_network_analyzer/src/main/scala/Tests/GraphMPCTests.scala
  24. 100
    0
      distributed_social_network_analyzer/src/main/scala/Tests/SerializationTests.scala
  25. 480
    0
      smallexamples/Deguindos-c5-logweights.xml
  26. 4
    0
      smallexamples/Deguindos_c5.xml
  27. 77
    0
      smallexamples/PardoYRato-logweights.xml
  28. 13
    0
      smallexamples/Trillo_c8.xml

+ 44
- 0
distributed_social_network_analyzer/README.md Zobrazit soubor

@@ -0,0 +1,44 @@
1
+# Distributed Social Network Analyzer #
2
+
3
+### Requirements ###
4
+
5
+Required sbt version: 1.4.6
6
+- Java 11
7
+- Spark 3.0.1 (with GraphX) : https://spark.apache.org/releases/spark-release-3-0-1.html
8
+- sbt project uses scala 2.12.2
9
+
10
+You can get help [here](http://www.scala-sbt.org/download.html)
11
+
12
+
13
+
14
+### Compiling ###
15
+
16
+Go to the project folder and execute
17
+```
18
+$sbt package
19
+```
20
+
21
+### Executing ###
22
+
23
+The main way to execute the project in Apache Spark after compiling:
24
+
25
+
26
+```
27
+$ spark-submit --master local[*] --class "MainAppFromXML" target/scala-2.12/social-network-analyzer_2.12-1.0.jar  xmlfilewithconversationgraph
28
+```
29
+
30
+
31
+Or use the script found in scripts/solveXMLconversation.sh
32
+
33
+
34
+# GraphX Pregel API
35
+
36
+## Doc:
37
+
38
+https://spark.apache.org/docs/latest/graphx-programming-guide.html#pregel-api
39
+
40
+## Examples:
41
+
42
+ spark-2.2.1-bin-hadoop2.7/examples/src/main/scala/org/apache/spark/examples/graphx/
43
+
44
+

+ 10
- 0
distributed_social_network_analyzer/distributed-analyzer.sbt Zobrazit soubor

@@ -0,0 +1,10 @@
1
+name := "Social Network Analyzer"
2
+
3
+version := "1.0"
4
+
5
+scalaVersion := "2.12.2"
6
+
7
+libraryDependencies += "org.apache.spark" %% "spark-core" % "3.0.1"
8
+libraryDependencies += "org.apache.spark" %% "spark-graphx" % "3.0.1"
9
+libraryDependencies += "org.json4s" %% "json4s-jackson" % "3.2.11"
10
+libraryDependencies += "org.apache.spark" %% "spark-mllib" % "3.0.1"

+ 3
- 0
distributed_social_network_analyzer/scripts/solveXMLconversation.sh Zobrazit soubor

@@ -0,0 +1,3 @@
1
+#!/bin/bash
2
+# Pass name of XML file with discussion graph, without .xml suffix
3
+spark-submit --master local[4] --class "MainAppFromXML" target/scala-2.12/social-network-analyzer_2.12-1.0.jar $1

+ 17
- 0
distributed_social_network_analyzer/src/main/scala/DownloaderManagers/RedditDownloader.scala Zobrazit soubor

@@ -0,0 +1,17 @@
1
+//SparkContext
2
+import org.apache.spark.SparkContext
3
+import org.apache.spark.SparkConf
4
+
5
+class RedditDownloader (context: SparkContext){
6
+  def getGraphFromConversationId(id:String, json_file:String) : RedditGraph = {
7
+    val resultGraph : RedditGraph = new RedditGraph(context)
8
+    val scriptPath = "Downloaders/reddit-collector/reddit.py"
9
+    val scriptName = "reddit.py"
10
+    context.addFile(scriptPath)
11
+    val data = context.parallelize(List(id))
12
+    val pipeRDD = data.pipe(scriptPath)
13
+    pipeRDD.collect().foreach(println)
14
+    resultGraph.parseRedditConversationFromFile(json_file)
15
+    resultGraph
16
+  }
17
+}

+ 27
- 0
distributed_social_network_analyzer/src/main/scala/DownloaderManagers/TwitterDownloader.scala Zobrazit soubor

@@ -0,0 +1,27 @@
1
+//SparkContext
2
+import org.apache.spark.SparkContext
3
+import org.apache.spark.SparkConf
4
+
5
+class TwitterDownloader (context: SparkContext){
6
+  // def getGraphFromConversationId(id:String) : TwitterGraph = {
7
+  //   val resultGraph : TwitterGraph = new TwitterGraph(context)
8
+  //   val scriptPath = "Downloaders/twitter-collector/twcollect.py"
9
+  //   val scriptName = "twcollect.py"
10
+  //   context.addFile(scriptPath)
11
+  //   val data = context.parallelize(List(id))
12
+  //   val pipeRDD = data.pipe(scriptPath)
13
+  //   pipeRDD.collect()
14
+  //   resultGraph.parseConversationFromFile("Storage/Jsons/" + id + ".json")
15
+  //   resultGraph
16
+  // }
17
+
18
+  def downloadAndStoreConversation(id: String) : String = {
19
+    val scriptPath = "Downloaders/twitter-collector/twcollect.py"
20
+    val scriptName = "twcollect.py"
21
+    context.addFile(scriptPath)
22
+    val data = context.parallelize(List(id))
23
+    val pipeRDD = data.pipe(scriptPath)
24
+    pipeRDD.collect()
25
+    "Storage/Jsons/" + id + ".json"
26
+  }
27
+}

+ 59
- 0
distributed_social_network_analyzer/src/main/scala/Graph/RedditGraph.scala Zobrazit soubor

@@ -0,0 +1,59 @@
1
+//SparkContext
2
+import org.apache.spark.SparkContext
3
+import org.apache.spark.SparkConf
4
+
5
+//Json4s
6
+import org.json4s._
7
+import org.json4s.jackson.JsonMethods._
8
+
9
+//GraphX
10
+import org.apache.spark._
11
+import org.apache.spark.graphx._
12
+import org.apache.spark.rdd.RDD
13
+
14
+//Lists
15
+import scala.collection.mutable.ListBuffer
16
+
17
+/***Summary***
18
+RedditGraph is the specific parser for reddit, it takes a Reddit
19
+conversation or specific reddit JSONs (obtained directly from the reddit API)
20
+and sets up a SocialGraph with this information.
21
+*************/
22
+
23
+class RedditGraph(context: SparkContext) extends SocialGraph(context){
24
+
25
+  def parseRedditConversationFromFile(filePath: String){
26
+    val file = scala.io.Source.fromFile(filePath)
27
+    val lines = file.getLines()
28
+    for (line <- lines){
29
+      parseRedditMessage(line)
30
+    }
31
+    file.close()
32
+  }
33
+
34
+  def parseRedditConversation(jsonConversation: String){
35
+    val conversation = parse(jsonConversation).extract[Reddit_Conversation_Messages]
36
+    val conversation_properties = parse(jsonConversation).extract[Reddit_Conversation_Properties]
37
+    for(i <- 0 until conversation.messages.size){
38
+      val l_message = conversation.messages(i)
39
+      val l_properties = conversation_properties.messages(i)
40
+      println("Message readed:"+
41
+                            "\n\tId:"       + l_message.id      +
42
+                            "\n\tAuthor: "  + l_message.author_name  +
43
+                            "\n\tMessage: " + l_message.text)
44
+      addMessage(l_message.id, l_message.author_name, l_message.text, l_properties)
45
+      if(l_message.parent != -1){
46
+        addConnection(l_message.id, l_message.parent, "noone") //TODO: Add real intention
47
+      }
48
+    }
49
+  }
50
+
51
+  def parseRedditMessage(jsonMessage: String){
52
+    parseRedditConversation("{\"messages\":["+jsonMessage+"]}")
53
+  }
54
+
55
+}
56
+
57
+case class Reddit_Temp(id: Long, author_name: String, text: String, parent : Long)
58
+case class Reddit_Conversation_Messages(messages: List[Reddit_Temp])
59
+case class Reddit_Conversation_Properties(messages: List[Map[String, Any]])

+ 364
- 0
distributed_social_network_analyzer/src/main/scala/Graph/SocialGraph.scala Zobrazit soubor

@@ -0,0 +1,364 @@
1
+//SparkContext
2
+import org.apache.spark.SparkContext
3
+import org.apache.spark.SparkConf
4
+
5
+//GraphX
6
+import org.apache.spark._
7
+import org.apache.spark.graphx._
8
+import org.apache.spark.rdd.RDD
9
+
10
+//Json4s
11
+import org.json4s._
12
+import org.json4s.jackson.JsonMethods._
13
+import org.json4s.jackson.Serialization
14
+
15
+//Lists
16
+import scala.collection.mutable.ListBuffer
17
+// import scala.collection.mutable
18
+
19
+//Serialization
20
+import java.io._
21
+
22
+
23
+
24
+/***Summary***
25
+SocialGraph is an abstraction for graph creation, allowing messages
26
+and connections that could be added directly or parsed from a Json
27
+
28
+Every message must contain an identifier, an author, a message and a Json filled
29
+with properties.
30
+
31
+Every connection will represent the edge between two messages, represented by
32
+source and destination identifiers and the kind of relation between them.
33
+************/
34
+
35
+class SocialGraph(context: SparkContext){
36
+    var m_context = context
37
+    var m_messages : Map[Long, Message] = Map() //ID -> Message
38
+    var m_connections : Map[(Long, Long), Edge[String]] = Map() //(From, To) -> Edge
39
+    var g_graph : Option[Graph[Message, String]] = None
40
+    var subgraph_indegrees = scala.collection.mutable.Map[Long, Int]() //   Map IDvertex to its INdegree in subgraph
41
+    var subgraph_defindegrees = scala.collection.mutable.Map[Long, Int]() //   Map IDvertex to its INdegree in subgraph
42
+    val m_representations = new GraphRepresentations(this, context)
43
+    var listofdiscnodes =  new ListBuffer[String]()
44
+
45
+    implicit val formats = DefaultFormats
46
+
47
+    def getSubGraph(attack: Boolean , support: Boolean, noone: Boolean) : SocialGraph = {
48
+      var resultGraph : SocialGraph = new SocialGraph(context)
49
+      var subGraphMessages : Map[Long, Message] = Map()
50
+      var sweight : Float = 0;
51
+      var dweight : Float = 0;
52
+
53
+      listofdiscnodes =  new ListBuffer[String]()
54
+      // Initialize degrees
55
+      resultGraph.subgraph_indegrees = scala.collection.mutable.Map[Long, Int]()
56
+      resultGraph.subgraph_defindegrees = scala.collection.mutable.Map[Long, Int]()
57
+      for (msg <- m_messages.values.toList) {
58
+        resultGraph.subgraph_indegrees += (msg.id -> 0)
59
+        resultGraph.subgraph_defindegrees += (msg.id -> 0)
60
+      }
61
+
62
+       //Look for desired connections
63
+       for (connection <- m_connections.values.toList){
64
+        if((attack && connection.attr == "attack") ||
65
+          (support && connection.attr == "support") ||
66
+          (noone && connection.attr == "noone")){
67
+            println( "Connection :"+connection.srcId.toString()+ " -> "+connection.dstId.toString())
68
+            val srcMessage : Message = m_messages(connection.srcId)
69
+            val dstMessage : Message = m_messages(connection.dstId)
70
+            sweight = srcMessage.weight
71
+            dweight = dstMessage.weight
72
+            if(!subGraphMessages.contains(connection.srcId)){
73
+
74
+              subGraphMessages += (connection.srcId -> srcMessage)
75
+              resultGraph.addMessage(srcMessage.id, srcMessage.author, srcMessage.message,
76
+              srcMessage.properties, srcMessage.weight)
77
+              // println( "Adding "+srcMessage.id.toString() )
78
+
79
+            }
80
+            if(!subGraphMessages.contains(connection.dstId)){
81
+
82
+              subGraphMessages += (connection.dstId -> dstMessage)
83
+              resultGraph.addMessage(dstMessage.id, dstMessage.author, dstMessage.message,
84
+              dstMessage.properties, dstMessage.weight)
85
+              // println( "Adding "+dstMessage.id.toString() )
86
+
87
+            }
88
+            resultGraph.subgraph_indegrees(connection.dstId) = resultGraph.subgraph_indegrees(connection.dstId) + 1
89
+            if (sweight.>=( dweight )) {
90
+              resultGraph.subgraph_defindegrees(connection.dstId) = resultGraph.subgraph_defindegrees(connection.dstId) + 1
91
+               println( "Effective : "+connection.srcId+"-"+sweight+ " -> "+connection.dstId+"-"+dweight)
92
+            } else {
93
+                 println( "NOT Effective : "+connection.srcId+"-"+sweight+ " -> "+connection.dstId+"-"+dweight)
94
+              }
95
+            resultGraph.addConnection(connection.srcId, connection.dstId, connection.attr)
96
+        }
97
+      }
98
+      var ndis : Int = 0;
99
+      // subGraphMessages.foreach(println)
100
+      for (msg <- m_messages.values.toList) {
101
+         if (!subGraphMessages.contains(msg.id)) {
102
+           val idstr = msg.id.toString()
103
+           // println(  " Node desconnected from graph: " + idstr  )
104
+           listofdiscnodes += idstr
105
+           ndis = ndis + 1;
106
+        }
107
+      }
108
+      //for (msg <- m_messages.values.toList) {
109
+
110
+        //   println(  " Node  "+ msg.id.toString() +" indegre: " + resultGraph.subgraph_indegrees(msg.id).toString())
111
+
112
+      // }
113
+      println( "Total disconnected : "+ndis.toString() )
114
+      resultGraph
115
+    }
116
+
117
+    def getListOfDisconnectedNodes() : ListBuffer[String] = {
118
+      listofdiscnodes
119
+    }
120
+
121
+    def updatePunctuation(id : Long, newPunctuation : Float){
122
+      m_messages(id).weight = newPunctuation
123
+    }
124
+
125
+    def storeGraph(directory : String, graphName : String){
126
+      //Create files
127
+      val file_messages = new File(directory + graphName + ".mes")
128
+      val file_connections = new File(directory + graphName + ".con")
129
+      val writer_messages = new BufferedWriter(new FileWriter(file_messages))
130
+      val writer_connections = new BufferedWriter(new FileWriter(file_connections))
131
+      var exp_json : String = ""
132
+      var map_json : String = ""
133
+
134
+      for (message <- m_messages.values.toList){
135
+
136
+          exp_json += "{\"id\":"        + message.id              + ","   +
137
+                      "\"weight\":"     + message.weight          + ","   +
138
+                      "\"author\":\""   + message.author          + "\"," +
139
+                      "\"message\":\""  + message.message.replace("\"", "\\\"").replace("\n", " ") + "\"," +
140
+                      "\"properties\":" + Serialization.write(message.properties)  + "}"
141
+          writer_messages.write(exp_json)
142
+          exp_json = "\n"
143
+      }
144
+
145
+      exp_json = ""
146
+      for (connection <- m_connections.values.toList){
147
+        exp_json += "{\"origin\":"     + connection.srcId     + "," +
148
+                   "\"destination\":" + connection.dstId      + "," +
149
+                   "\"intention\":\"" + connection.attr       + "\"}"
150
+        writer_connections.write(exp_json)
151
+        exp_json = "\n"
152
+      }
153
+
154
+      //Close files
155
+      writer_messages.close()
156
+      writer_connections.close()
157
+    }
158
+
159
+    def loadGraph(directory : String, graphName : String){
160
+      //Open files
161
+      val baseFilename = directory + graphName
162
+      val file_messages = scala.io.Source.fromFile(baseFilename + ".mes")
163
+      val file_connections = scala.io.Source.fromFile(baseFilename + ".con")
164
+
165
+      //Load messages
166
+      var lines = file_messages.getLines()
167
+      var message : String = ""
168
+      var count : Int = 0
169
+
170
+      for (line <- lines){
171
+          parseMessage(line)
172
+          count += 1
173
+      }
174
+
175
+      //Load connections
176
+      lines = file_connections.getLines()
177
+      for (line <- lines){
178
+        parseConnection(line)
179
+      }
180
+
181
+      //Close files
182
+      file_messages.close()
183
+      file_connections.close()
184
+
185
+    }
186
+
187
+    def makeGraph(){
188
+      //Create vertex
189
+      val g_vertex : RDD[(VertexId, Message)] = m_context.parallelize(messagesVertexFormat())
190
+      //Create edges
191
+      val g_edges : RDD[Edge[String]] = m_context.parallelize(m_connections.values.to[collection.immutable.Seq])
192
+      //Create default if no relationship
193
+      val defaultMap : Map[String, Any] = Map("Invalid" -> "Invalid")
194
+      val defaultRelation = new Message(-1L, 0, "Invalid", "Invalid", defaultMap)
195
+
196
+      //Build the graph
197
+      g_graph = Some(Graph(g_vertex, g_edges, defaultRelation))
198
+    }
199
+
200
+    def getGraph(): Graph[Message, String] = {
201
+      g_graph match{
202
+        case None => {
203
+          makeGraph()
204
+          getGraph()
205
+        }
206
+        case Some(s) => s
207
+      }
208
+    }
209
+
210
+    def addMessage(id: Long, author: String, message: String,
211
+       properties: Map[String, Any], weight : Float = -1){
212
+      m_messages += (id -> new Message(id, weight, author, message, properties))
213
+    }
214
+
215
+    def addConnection(origin: Long, destination: Long, intention: String){
216
+      m_connections += ((origin, destination) -> new Edge(origin, destination, intention))
217
+    }
218
+
219
+    /***Summary***
220
+    Calculates the max of the distances of the graph.
221
+    *************/
222
+    def maxDistance() : Int = {
223
+      val graph = m_representations.integerOnly(0)
224
+      val sssp = graph.pregel(0)(
225
+        (id, dist, newDist) => {
226
+          newDist
227
+        },
228
+        triplet => {
229
+          if(triplet.dstAttr <= triplet.srcAttr){
230
+            Iterator((triplet.dstId, triplet.srcAttr+1))
231
+          }
232
+          else{
233
+            Iterator.empty
234
+          }
235
+        },
236
+        (a,b) => {
237
+          if(a>b) a
238
+          else b
239
+        }
240
+      )
241
+      val distance = sssp.vertices.reduce((a,b) => {
242
+        if(a._2 > b._2) a
243
+        else b
244
+      })
245
+      println(sssp.vertices.collect.mkString("\n"))
246
+      distance._2
247
+    }
248
+
249
+    /***Summary***
250
+    Calculates the min of the distances of the graph.
251
+    *************/
252
+    def minDistance() : Int = {
253
+      val graph = m_representations.integerBool(0, true)
254
+      val sssp = graph.pregel((0, true))(
255
+        (id, dist, newDist) => {
256
+          if(dist._1 == newDist._1 && dist._1 != 0){
257
+            (dist._1, false)
258
+          }
259
+          else{
260
+            newDist
261
+          }
262
+        },
263
+        triplet => {
264
+          if(triplet.dstAttr._2){
265
+            Iterator((triplet.dstId, (triplet.srcAttr._1+1, true)))
266
+          }
267
+          else{
268
+            Iterator.empty
269
+          }
270
+        },
271
+        (a,b) => {
272
+          if(a._1 < b._1) a
273
+          else b
274
+        }
275
+      )
276
+      val distance = sssp.vertices.reduce((a,b) => {
277
+        if(a._2._1 > b._2._1) a
278
+        else b
279
+      })
280
+      println(sssp.vertices.collect.mkString("\n"))
281
+      distance._2._1
282
+    }
283
+
284
+    /***Summary***
285
+    Calculates the average of the distances of the graph.
286
+    *************/
287
+    def averageDistance() : Float = {
288
+
289
+      //First acquire the sum of all the distances
290
+      val graph = m_representations.integerIntegerBool()
291
+      val sssp = graph.pregel((-1, -1, true))(
292
+        (id, dist, newDist) => {
293
+          if(newDist._1 == -1){
294
+            (0,0,true)
295
+          }
296
+          else{
297
+            if(dist._1 == newDist._1 && dist._2 == newDist._2){
298
+              (dist._1, dist._2, false)
299
+            }
300
+            else{
301
+              newDist
302
+            }
303
+          }
304
+        },
305
+        triplet => {
306
+          if(triplet.dstAttr._3){
307
+            if(triplet.srcAttr._1 == 0){
308
+              Iterator((triplet.dstId, (1, 1, true)))
309
+            }
310
+            else{
311
+              Iterator((triplet.dstId, (triplet.srcAttr._1+triplet.srcAttr._2, triplet.srcAttr._2, true)))
312
+            }
313
+          }
314
+          else{
315
+            Iterator.empty
316
+          }
317
+        },
318
+        (a,b) => {
319
+          (a._1+b._1, a._2+b._2, true)
320
+        }
321
+      )
322
+      val distance = sssp.vertices.reduce((a,b) => {
323
+        if(a._2._1 > b._2._1) a
324
+        else b
325
+      })
326
+      //println(sssp.vertices.collect.mkString("\n"))
327
+      //Divide the total distance by the number of leaf vertices.
328
+      val leafs = (graph.vertices.count - graph.inDegrees.count)
329
+      //println("Total leafs: " + leafs)
330
+      //println("Total distance " + distance._2._1)
331
+      val avg : Float = (distance._2._1.toFloat/leafs.toFloat)
332
+      //Return average
333
+      avg
334
+    }
335
+
336
+    def getMessages(): Map[Long, Message] = m_messages
337
+
338
+    def getConnections(): Map[(Long, Long), Edge[String]] = m_connections
339
+
340
+    def getRepresentation(): GraphRepresentations = m_representations
341
+
342
+    def parseMessage(messageJson : String){
343
+      val parsedMessage : Message = parse(messageJson).extract[Message]
344
+      m_messages += (parsedMessage.id -> parsedMessage)
345
+    }
346
+
347
+    def parseConnection(connectionJson : String){
348
+      val conn = parse(connectionJson).extract[Connection]
349
+      m_connections += ((conn.origin, conn.destination) -> new Edge(conn.origin, conn.destination, conn.intention))
350
+    }
351
+
352
+    private def messagesVertexFormat() : Array[(VertexId, Message)] = {
353
+      val returnArray = new Array[(VertexId, Message)](m_messages.toList.length)
354
+      val messagesArray = m_messages.toList.toArray
355
+      for(i <- 0 until messagesArray.length){
356
+        returnArray(i) = (messagesArray(i)._1, messagesArray(i)._2)
357
+      }
358
+      returnArray
359
+    }
360
+
361
+}
362
+
363
+case class Message(id: Long, var weight: Float, author: String, message: String, properties: Map[String, Any])
364
+case class Connection (origin: Long, destination: Long, intention: String)

+ 148
- 0
distributed_social_network_analyzer/src/main/scala/Graph/TwitterGraph.scala Zobrazit soubor

@@ -0,0 +1,148 @@
1
+//SparkContext
2
+import org.apache.spark.SparkContext
3
+import org.apache.spark.SparkConf
4
+
5
+//Json4s
6
+import org.json4s._
7
+import org.json4s.jackson.JsonMethods._
8
+
9
+//GraphX
10
+import org.apache.spark._
11
+import org.apache.spark.graphx._
12
+import org.apache.spark.rdd.RDD
13
+
14
+//Lists
15
+import scala.collection.mutable.ListBuffer
16
+import scala.xml.XML
17
+
18
+/***Summary***
19
+TwitterGraph is the specific parser for twitter, it takes a Twitter
20
+conversation or specific tweet JSONs (obtained directly from the twitter API)
21
+and sets up a SocialGraph with this information.
22
+*************/
23
+
24
+/***Conversation Json***
25
+There are two main ways to parse a conversation:
26
+
27
+1st: Do it tweet by tweet: place on every line a json from a tweet from the
28
+conversation and read it line by line and parse them using parseTweet.
29
+
30
+2nd: Using a conversation json that must be composed on that way:
31
+{"tweets":[{jsonTweet1}, {jsonTweet2}, {jsonTweet3}]}
32
+Where a jsonTweet is a json of a tweet obtained directly from twitter API
33
+***********************/
34
+
35
+class TwitterGraph(context: SparkContext, classifier: MPC) extends SocialGraph(context){
36
+  var userReferences : Map[String, List[Long]] = Map()
37
+  var focusTo : Map[Long, Option[Long]] = Map().withDefaultValue(None)
38
+  var tweetAuthor : Map[Long, String] = Map()
39
+
40
+
41
+
42
+  def parseConversationFromFile(filePath: String){
43
+    val file = context.textFile(filePath).toLocalIterator.toArray
44
+    for (line <- file){
45
+      parseTweet(line)
46
+    }
47
+  }
48
+
49
+  def parseConversation(jsonConversation: String){
50
+    val conversation = parse(jsonConversation).extract[Twitter_Conversation_Tweets]
51
+    val conversation_properties = parse(jsonConversation).extract[Twitter_Conversation_Properties]
52
+    for(i <- 0 until conversation.tweets.size){
53
+      val l_tweet = conversation.tweets(i)
54
+      val l_properties = conversation_properties.tweets(i)
55
+      println("Tweet readed:" +
56
+              "\n\tId:"       + l_tweet.id      +
57
+              "\n\tAuthor: "  + l_tweet.user("screen_name").toString  +
58
+              "\n\tMessage: " + l_tweet.text)
59
+      addMessage(l_tweet.id, l_tweet.user("screen_name").toString,
60
+      l_tweet.text, l_properties, weightCalc(l_tweet.user("followers_count").asInstanceOf[Number].intValue,
61
+      l_tweet.retweet_count.toInt, l_tweet.favorite_count.toInt))
62
+      tweetAuthor += (l_tweet.id -> l_tweet.user("screen_name").toString)
63
+      println("Author of this tweet is: " + tweetAuthor(l_tweet.id))
64
+      makeLinks(l_tweet.id, l_properties, l_tweet.entities.user_mentions)
65
+    }
66
+  }
67
+
68
+
69
+  private def weightCalc(followers : Int, retweets : Int, favorites : Int) : Float = {
70
+    var sum : Double = followers
71
+    sum += 20 * retweets
72
+    sum += 40 * favorites
73
+    sum += 1
74
+    (scala.math.log(sum)/scala.math.log(2)).toFloat
75
+  }
76
+
77
+  def parseTweet(jsonTweet: String){
78
+    parseConversation("{\"tweets\":["+jsonTweet+"]}")
79
+  }
80
+
81
+  private def makeLinks(id: Long, info: Map[String, Any],
82
+    mentions: List[Map[String,Any]]){
83
+    //Make basic link
84
+    val reply_to = info("in_reply_to_status_id")
85
+    if(reply_to != null){
86
+      val l_reply_to:Long = reply_to.asInstanceOf[Number].longValue
87
+      focusTo += (id -> Some(l_reply_to))
88
+      println("Link info:"+
89
+              "\n\tOrigin: " + id +
90
+              "\n\tDestin: " + l_reply_to)
91
+      addConnection(id, l_reply_to, classifier.getRelationship(id,l_reply_to))
92
+      val mentionList : ListBuffer[String] = getMentionList(mentions)
93
+      println("Mention list: ")
94
+      for(mention <- mentionList){
95
+        println("\t-" + mention)
96
+      }
97
+      makeFullLinks(id, l_reply_to, mentionList)
98
+    }
99
+    else{
100
+      println("This is the root tweet")
101
+    }
102
+  }
103
+
104
+  private def makeFullLinks(id: Long, reply_to: Long,
105
+    mentions: ListBuffer[String]){
106
+    var nextLookFocus = reply_to
107
+    var focus: Option[Long] = None
108
+    for(mention <- mentions){
109
+      do {
110
+       focus = focusTo.getOrElse(nextLookFocus, None)
111
+       focus match{
112
+         case Some(s) => {
113
+           val currAuthor = tweetAuthor.getOrElse(s, None)
114
+           if(currAuthor != None && currAuthor == mention && reply_to != s){
115
+             println(println("Link info:"+
116
+                     "\n\tOrigin: " + id +
117
+                     "\n\tDestin: " + s))
118
+           }
119
+           addConnection(id, s, classifier.getRelationship(id, s))
120
+           nextLookFocus = s
121
+         }
122
+         case None => {
123
+           nextLookFocus = reply_to
124
+         }
125
+       }
126
+      }
127
+      while(focus != None)
128
+    }
129
+  }
130
+
131
+  private def getMentionList(mentions: List[Map[String, Any]]) : ListBuffer[String]= {
132
+    val mentionList: ListBuffer[String] = ListBuffer[String]()
133
+    for(mention <- mentions){
134
+      val name : String = mention("screen_name").toString
135
+      if(!mentionList.contains(name))
136
+        mentionList += name
137
+    }
138
+    mentionList.distinct
139
+    mentionList
140
+  }
141
+
142
+}
143
+
144
+case class Tweet_Temp(id: Long, user: Map[String, Any], text: String,
145
+  entities : Entities_Temp, retweet_count : Long, favorite_count : Long)
146
+case class Entities_Temp(user_mentions: List[Map[String, Any]])
147
+case class Twitter_Conversation_Tweets(tweets: List[Tweet_Temp])
148
+case class Twitter_Conversation_Properties(tweets: List[Map[String, Any]])

+ 78
- 0
distributed_social_network_analyzer/src/main/scala/Graph/TwitterGraphFromXML.scala Zobrazit soubor

@@ -0,0 +1,78 @@
1
+//SparkContext
2
+import org.apache.spark.SparkContext
3
+import org.apache.spark.SparkConf
4
+
5
+//Json4s
6
+import org.json4s._
7
+import org.json4s.jackson.JsonMethods._
8
+
9
+//GraphX
10
+import org.apache.spark._
11
+import org.apache.spark.graphx._
12
+import org.apache.spark.rdd.RDD
13
+
14
+//Lists
15
+import scala.collection.mutable.ListBuffer
16
+import scala.xml.XML
17
+
18
+
19
+
20
+/***Summary***
21
+TwitterGraphFromXML is the XML-based parser for Twitter: it takes a
22
+labelled conversation (arguments and entailment-labelled pairs in XML)
23
+and sets up a SocialGraph with this information.
24
+*************/
25
+
26
+/***Conversation XML***
27
+This class parses a labelled conversation stored as XML, not JSON:
28
+
29
+The argument-list element provides the arg nodes (id, weight, text)
30
+that become messages via addMessage.
31
+
32
+The argument-pairs element provides the pair nodes whose entailment
33
+attribute (ATTACKS / SUPPORTS / other) is mapped to the edge intention
34
+(attack / support / noone) via addConnection.
35
+***********************/
36
+
37
+class TwitterGraphFromXML(context: SparkContext ) extends SocialGraph(context){
38
+  var userReferences : Map[String, List[Long]] = Map()
39
+  var focusTo : Map[Long, Option[Long]] = Map().withDefaultValue(None)
40
+  var tweetAuthor : Map[Long, String] = Map()
41
+
42
+
43
+
44
+
45
+  def getMeaningOfLabel(relation: String ): String = {
46
+    val meaning = relation match{
47
+      case "ATTACKS" => "attack"
48
+      case "SUPPORTS" => "support"
49
+      case _ => "noone"
50
+    }
51
+    return meaning
52
+  }
53
+
54
+
55
+  def parseXMLLabelledConversation( xmlLabelledConversation: String ) {
56
+     val xmlconv = XML.loadFile( xmlLabelledConversation )
57
+     // Parse the argument-list list of <arg > elements
58
+     val arguments = xmlconv \ "argument-list"
59
+     val listofarguments = arguments \ "arg"
60
+     for (arg <- listofarguments) {
61
+       //  println( (arg \ "@id").text + arg.text + (arg \ "@weight").text)
62
+       addMessage( (arg \ "@id").text.toLong, "anonymous",
63
+       arg.text, arg.attributes.asAttrMap, (arg \ "@weight").text.toFloat )
64
+     }
65
+     // Parse the argument pairs
66
+     val argpairs =  xmlconv \ "argument-pairs"
67
+     val listofargpairs = argpairs \ "pair"
68
+     for (apair <- listofargpairs) {
69
+       // println( apair )
70
+       val answer = ((apair \ "t") \"@id" ).text
71
+       val source = ((apair \ "h") \"@id" ).text
72
+       // println( answer + " -> " + source + " " + getMeaningOfLabel((apair \ "@entailment").text) )
73
+       addConnection(  answer.toLong, source.toLong, getMeaningOfLabel((apair \ "@entailment").text))
74
+     }
75
+  }
76
+
77
+
78
+}

+ 220
- 0
distributed_social_network_analyzer/src/main/scala/GraphCalculous/GraphRepresentations.scala Zobrazit soubor

@@ -0,0 +1,220 @@
1
+//SparkContext
2
+import org.apache.spark.SparkContext
3
+import org.apache.spark.SparkConf
4
+
5
+//GraphX
6
+import org.apache.spark._
7
+import org.apache.spark.graphx._
8
+import org.apache.spark.rdd.RDD
9
+
10
+//Lists
11
+import scala.collection.mutable.ListBuffer
12
+
13
class GraphRepresentations(val originalGraph: SocialGraph,
                           val context: SparkContext){

    /***Summary***
    This class provides different GraphX graph representations of a
    SocialGraph: the vertex/edge structure is replicated while each vertex
    carries a freshly-initialised state tuple suitable for pregel runs.
    ************/

    // Builds the shared edge RDD: one Edge[String] per connection of the
    // original graph (identical for every representation).
    private def replicatedEdges(): RDD[Edge[String]] =
      context.parallelize(originalGraph.getConnections().values.to[collection.immutable.Seq])

    // True when the vertex has at least one incoming edge in the original
    // graph. NOTE(review): inDegrees.lookup() is a per-vertex RDD scan — fine
    // for small graphs, quadratic overall; consider collecting inDegrees once.
    private def hasIncomingEdge(id: VertexId): Boolean =
      originalGraph.getGraph().inDegrees.lookup(id).nonEmpty

    /**Summary**
    Gives an Integer graph representation with the same structure and
    connections as the original; every vertex starts at `initValue`.
    ***********/
    def integerOnly(initValue : Integer): Graph[Int, String] = {
      // Replicate vertices (foreach, not map: we only want the side effect).
      var replicatedVertices : ListBuffer[(VertexId, Int)] =
        new ListBuffer[(VertexId, Int)]
      originalGraph.getMessages().foreach(a => replicatedVertices += ((a._1, initValue)))
      val g_vertex : RDD[(VertexId, Int)] = context.parallelize(replicatedVertices)
      // Build the graph over the replicated connections.
      Graph(g_vertex, replicatedEdges())
    }

    /**Summary**
    Gives an (Integer, Integer) graph representation with the same structure
    and connections as the original; every vertex starts at (0, 0).
    ***********/
    def integerInteger(): Graph[(Int, Int), String] = {
      var replicatedVertices : ListBuffer[(VertexId, (Int, Int))] =
        new ListBuffer[(VertexId, (Int, Int))]
      originalGraph.getMessages().foreach(a => replicatedVertices += ((a._1, (0, 0))))
      val g_vertex : RDD[(VertexId, (Int, Int))] = context.parallelize(replicatedVertices)
      Graph(g_vertex, replicatedEdges())
    }

    /**Summary**
    Gives an (Integer, Bool) graph representation with the same structure and
    connections as the original, e.g. to stop sending pregel messages to a
    node marked as 'false'.
    ***********/
    def integerBool(intVal:Integer, boolVal:Boolean): Graph[(Int, Boolean), String] = {
      var replicatedVertices : ListBuffer[(VertexId, (Int, Boolean))] =
        new ListBuffer[(VertexId, (Int, Boolean))]
      originalGraph.getMessages().foreach(a => replicatedVertices += ((a._1, (intVal, boolVal))))
      val g_vertex : RDD[(VertexId, (Int, Boolean))] = context.parallelize(replicatedVertices)
      Graph(g_vertex, replicatedEdges())
    }

    /**Summary**
    Gives an (Integer, Integer, Boolean) graph representation with the same
    structure and connections as the original; every vertex starts at (0, 0, true).
    ***********/
    def integerIntegerBool(): Graph[(Int, Int, Boolean), String] = {
      var replicatedVertices : ListBuffer[(VertexId, (Int, Int, Boolean))] =
        new ListBuffer[(VertexId, (Int, Int, Boolean))]
      originalGraph.getMessages().foreach(a => replicatedVertices += ((a._1, (0, 0, true))))
      val g_vertex : RDD[(VertexId, (Int, Int, Boolean))] = context.parallelize(replicatedVertices)
      Graph(g_vertex, replicatedEdges())
    }

    /**Summary**
    Gives a (Float, Integer, Boolean) graph representation with the same
    structure and connections as the original.
    When `firstIsWeight` is set, the first component is the node's weight;
    otherwise it is `first`. Leaf nodes (no incoming edge) start at `leafsBool`,
    inner nodes at `third`.
    ***********/
    def FloatIntegerBool(first: Float = 0, second: Integer = 0,
      third: Boolean = true, firstIsWeight: Boolean = false,
      leafsBool: Boolean = true):
      Graph[(Float, Int, Boolean), String] = {
      var replicatedVertices : ListBuffer[(VertexId, (Float, Int, Boolean))] =
        new ListBuffer[(VertexId, (Float, Int, Boolean))]
      originalGraph.getMessages().foreach(a => {
        val firstValue : Float = if (firstIsWeight) a._2.weight else first
        val initValue : Boolean = if (hasIncomingEdge(a._1)) third else leafsBool
        replicatedVertices += ((a._1, (firstValue, second, initValue)))
      })
      val g_vertex : RDD[(VertexId, (Float, Int, Boolean))] = context.parallelize(replicatedVertices)
      Graph(g_vertex, replicatedEdges())
    }

    /**Summary**
    Gives a (Float, Integer, Long, Boolean) graph representation with the same
    structure and connections as the original. Same initialisation rules as
    FloatIntegerBool, with an extra Long component initialised to `third`
    (typically a message counter).
    ***********/
    def FloatIntegerLongBool( first: Float = 0,
                              second: Integer = 0,
                              third: Long = 0,
                              fourth: Boolean = true,
                              firstIsWeight: Boolean = false,
                              leafsBool: Boolean = true):
      Graph[(Float, Int, Long, Boolean), String] = {
      var replicatedVertices : ListBuffer[(VertexId, (Float, Int, Long, Boolean))] =
        new ListBuffer[(VertexId, (Float, Int, Long, Boolean))]
      originalGraph.getMessages().foreach(a => {
        val firstValue : Float = if (firstIsWeight) a._2.weight else first
        val initValue : Boolean = if (hasIncomingEdge(a._1)) fourth else leafsBool
        replicatedVertices += ((a._1, (firstValue, second, third, initValue)))
      })
      val g_vertex : RDD[(VertexId, (Float, Int, Long, Boolean))] = context.parallelize(replicatedVertices)
      Graph(g_vertex, replicatedEdges())
    }

    // Compute vertex representation as:
    //
    //  ((Weight, INdegree, DEFsindegree), (NumAcceptedAttackers, NumRejectedAttackers),
    //   NumReceivedMessages, Finished)
    //
    // Leaf nodes start Finished at `leafsInitState`, inner nodes at `fourth`;
    // the message counter always starts at 0.
    def FloatIntegerIntegerIntegerLongBool(
        firstIsWeight: Boolean = false,
        first: Float = 0,
        second: Integer = 0, third: Integer = 0,  // (NumAcceptedAttackers, NumRejectedAttackers)
        fourth: Boolean = false,                  // Finished default value
        leafsInitState: Boolean = true  ) :
    Graph[( (Float,Int,Int), (Int,Int), Long, Boolean), String] = {
    var replicatedVertices : ListBuffer[(VertexId, ((Float,Int,Int), (Int,Int), Long, Boolean))] =
      new ListBuffer[(VertexId, ((Float,Int,Int), (Int,Int), Long, Boolean))]

    originalGraph.getMessages().foreach(a => {
      val firstValue : Float = if (firstIsWeight) a._2.weight else first
      val initValue : Boolean = if (hasIncomingEdge(a._1)) fourth else leafsInitState
      replicatedVertices += ((a._1,
        ((firstValue, originalGraph.subgraph_indegrees(a._1), originalGraph.subgraph_defindegrees(a._1)),
         (second, third), 0, initValue)))
    })
    val g_vertex : RDD[(VertexId, ((Float,Int,Int), (Int,Int), Long, Boolean))] = context.parallelize(replicatedVertices)
    Graph(g_vertex, replicatedEdges())
  }

}

+ 264
- 0
distributed_social_network_analyzer/src/main/scala/GraphCalculous/RelationshipsCalcs.scala Zobrazit soubor

@@ -0,0 +1,264 @@
1
+//SparkContext
2
+import org.apache.spark.SparkContext
3
+import org.apache.spark.SparkConf
4
+
5
+//GraphX
6
+import org.apache.spark._
7
+import org.apache.spark.graphx._
8
+import org.apache.spark.rdd.RDD
9
+
10
+//Lists
11
+import scala.collection.mutable.ListBuffer
12
+
13
class RelationshipsCalcs(){

    /*****Summary*****
    |Pregel computations over a SocialGraph subgraph of attack relations.
    |Vertex states encode Weight / Defeaters / Finished (exact tuple shape
    |varies per method).
    |Defeaters: -1 at initial state, so every vertex gets its value set at
    |least once, avoiding marking it as finished too early.
    |Finished: True / False
    *****************/

    /**
     * Counts the vertices that end with zero accepted defeaters.
     * Vertex state: (Weight, numDefeaters, finished).
     */
    def calcDefeaters(sGraph: SocialGraph) : Int = {
      // Init graph to (Weight, -1, False)
      val graph = sGraph.getSubGraph(true, false, false).getRepresentation().FloatIntegerBool(0, -1, false, true, true)

      val sssp = graph.pregel((0, 0, false))(
        // Vertex program: fold the aggregated message into the state.
        (id, main, received) => {
          if((main._2 == received._2 && received._3) || main._3){
            // The defeater count did not change: the vertex has converged.
            (main._1, received._2, true)
          }
          else{
            (main._1, received._2, false)
          }
        },
        // Send program: src influences dst only while dst is unfinished and
        // src's weight is at least dst's weight.
        triplet => {
          if(triplet.srcAttr._1 >= triplet.dstAttr._1 &&
             triplet.dstAttr._3 == false){
            if(triplet.srcAttr._2 == 0){
              // src has no accepted defeater itself, so it defeats dst.
              Iterator((triplet.dstId, (0, 1, triplet.srcAttr._3)))
            }
            else{
              Iterator((triplet.dstId, (0, 0, triplet.srcAttr._3)))
            }
          }
          else{
            Iterator.empty
          }
        },
        // Merge program: sum defeater counts, AND the finished votes.
        (a,b) => (0, a._2 + b._2, a._3 && b._3)
      )
      // A node counts when it ends with zero defeaters.
      val defeaterNodes = sssp.vertices.map(node => {
        if(node._2._2 > 0) 0 else 1
      }).reduce((a, b) => a+b)
      println("TOTAL DEFEATERS: " + defeaterNodes)
      defeaterNodes
    }

    /**
     * Variant of calcDefeaters that only lets FINISHED sources emit messages,
     * reducing pregel traffic. Same result contract as calcDefeaters.
     */
    def optimizedCalcDefeaters(sGraph: SocialGraph) : Int = {
      // Init graph to (Weight, -1, False)
      val graph = sGraph.getSubGraph(true, false, false).getRepresentation().FloatIntegerBool(0, -1, false, true, true)

      val sssp = graph.pregel((0, 0, false))(
        (id, main, received) => {
          if((main._2 == received._2) || main._3){
            // Unchanged defeater count (or already done): converged.
            (main._1, received._2, true)
          }
          else{
            // Converge as soon as all senders were themselves finished.
            if(!received._3) (main._1, received._2, false)
            else             (main._1, received._2, true)
          }
        },
        triplet => {
          // Only a finished src may message an unfinished, lighter dst.
          if(triplet.srcAttr._1 >= triplet.dstAttr._1 &&
             triplet.srcAttr._3 && !triplet.dstAttr._3){
            if(triplet.srcAttr._2 == 0){
              Iterator((triplet.dstId, (0, 1, triplet.srcAttr._3)))
            }
            else{
              Iterator((triplet.dstId, (0, 0, triplet.srcAttr._3)))
            }
          }
          else{
            Iterator.empty
          }
        },
        // All senders here are finished by construction, hence `true`.
        (a,b) => (0, a._2 + b._2, true)
      )
      val defeaterNodes = sssp.vertices.map(node => {
        if(node._2._2 > 0) 0 else 1
      }).reduce((a, b) => a+b)
      println("TOTAL DEFEATERS: " + defeaterNodes)
      defeaterNodes
    }

    /**
     * Same computation as calcDefeaters, but additionally counts the total
     * number of pregel messages exchanged (third tuple slot).
     * Vertex state: (Weight, numDefeaters, numReceivedMessages, finished).
     */
    def calcDefeatersMessageCount(sGraph: SocialGraph) : Long = {
      println("Preparing environment for calculate message count")
      val graph = sGraph.getSubGraph(true, false, false).getRepresentation().FloatIntegerLongBool(0, 0, 0, false, true, true)
      // Initial message uses -1 as sentinel so leaves are not marked unfinished.
      val sssp = graph.pregel((0, -1, 0, false))(
        (id, main, received) => {
          if(received._2 == -1){
            // Initial superstep: keep the leaf/inner finished flag as-is.
            (main._1, 0, 0, main._4)
          }
          else{
            // Converged when the defeater count did not change.
            (main._1, received._2, main._3 + received._3, main._2 == received._2 || received._4)
          }
        },
        triplet => {
          // Vote to halt when both endpoints are finished.
          if(triplet.srcAttr._4 && triplet.dstAttr._4){
            Iterator.empty
          }
          else{
            // Every message carries a count of 1 in the third slot.
            if(triplet.srcAttr._2 == 0 && triplet.srcAttr._1 >= triplet.dstAttr._1){
              Iterator((triplet.dstId, (0, 1, 1, triplet.srcAttr._4)))
            }
            else{
              Iterator((triplet.dstId, (0, 0, 1, triplet.srcAttr._4)))
            }
          }
        },
        (a,b) => (0, a._2 + b._2, a._3 + b._3, a._4 && b._4)
      )
      val totalMessages = sssp.vertices.reduce((a,b) => (0, (0, 0, a._2._3 + b._2._3, true)))
      println("TOTAL MESSAGES: " + totalMessages._2._3)
      val defeaterNodes = sssp.vertices.map(node => {
        if(node._2._2 > 0) 0 else 1
      }).reduce((a, b) => a+b)
      println("TOTAL DEFEATERS: " + defeaterNodes)
      defeaterNodes
    }

    /**
     * Counts accepted arguments using per-node in-degrees, and returns the
     * full list of accepted node ids (including disconnected nodes) as XML.
     *
     * Graph node state:
     *   ((Weight, INdegree, DEFsINdegree), (NumAcceptedAttackers, NumRejectedAttackers),
     *    NumReceivedMessages, Finished)
     */
    def optimizedCalcDefeatersMessageCount(sGraph: SocialGraph ) : scala.xml.Elem = {
      // Init graph to ((Weight,INdegree,DEFsINdegree), (0,0), 0, False)
      val graph = sGraph.getSubGraph(true, false, false).getRepresentation().FloatIntegerIntegerIntegerLongBool( true, 0, 0, 0, false, true)
      // Disconnected nodes are accepted by definition; collect them up front.
      var solutionlist = sGraph.getListOfDisconnectedNodes()

      val sssp = graph.pregel(initialMsg = (0, (0,0), 0), activeDirection = EdgeDirection.Out)(
        (id, main, received) => {
          // Fold message `received` into current state `main`; the returned
          // value has the same shape as `main`.
          val newaccepteddefeaters = main._2._1 + received._2._1
          val newrejecteddefeaters = main._2._2 + received._2._2
          // Finished when all attackers are rejected (count == DEFsINdegree)...
          if( newrejecteddefeaters == main._1._3 ){
            (main._1, (newaccepteddefeaters, newrejecteddefeaters), main._3 + received._3, true)
          }
          else{
            // ...or as soon as one attacker is accepted (the node is defeated).
            if ( newaccepteddefeaters >= 1){
              (main._1, (newaccepteddefeaters, newrejecteddefeaters), main._3 + received._3, true )
            }
            else{
              (main._1, (newaccepteddefeaters, newrejecteddefeaters), main._3 + received._3, false)
            }
          }
        },
        triplet => {
          // Only a finished src with weight >= dst's weight messages an
          // unfinished dst.
          if(triplet.srcAttr._1._1 >= triplet.dstAttr._1._1 &&
             triplet.srcAttr._4 && !triplet.dstAttr._4){
            if(triplet.srcAttr._2._2 == triplet.srcAttr._1._3){
              // src rejected ALL its defeaters: it is accepted, so it defeats
              // dst — send one more "accepted defeater" (1,0).
              Iterator((triplet.dstId, (0, (1,0), 1)))
            }
            else if (triplet.srcAttr._2._1 >= 1) {
              // src itself is defeated: its attack is void — send one more
              // "rejected defeater" (0,1).
              Iterator((triplet.dstId, (0, (0,1), 1)))
            }
            else{
              Iterator.empty
            }
          }
          else{
            Iterator.empty
          }
        },
        (a,b) => {
          // Join messages: sum both defeater counters and the message counts.
          (0, (a._2._1 + b._2._1, a._2._2 + b._2._2), a._3 + b._3 )
        }
      )
      val totalMessages = sssp.vertices.reduce((a,b) => (0, ((0,0,0), (0,0), a._2._3 + b._2._3, true)))
      println("TOTAL MESSAGES: " + totalMessages._2._3)
      // node = (id, ((Weight,INdegree,DEFsINdegree), (accepted,rejected), messagesrecv, Finished))
      // Accepted <=> all its defeaters were rejected.
      val defeaterNodes = sssp.vertices.map(node => {
        if(node._2._2._2 == node._2._1._3) 1 else 0
      }).reduce((a, b) => a+b)

      val listofdefeaterNodes = sssp.vertices.filter( node => (node._2._2._2 == node._2._1._3)).
         map( node => {node._1.toString() } ).collect()
      println("total ACCEPTED without disconnected: " + defeaterNodes)

      // BUG FIX: was `listofdefeaterNodes.foreach( solutionlist += )`, which
      // does not compile (missing placeholder argument); append them all.
      solutionlist ++= listofdefeaterNodes
      println("LIST OF ACCEPTED ONES:" )
      solutionlist.foreach( println )
      println("END OF LIST" )
      println("TOTAL ACCEPTED: "+solutionlist.length.toString())
      <ArgumentationAnswers><answer>{solutionlist.map(i => <arg id={i} />)}</answer></ArgumentationAnswers>
    }
  }

+ 79
- 0
distributed_social_network_analyzer/src/main/scala/Learning/MP/MPClassifier.scala Zobrazit soubor

@@ -0,0 +1,79 @@
1
+import org.apache.spark.sql.DataFrame
2
+import org.apache.spark.sql.SparkSession
3
+import java.io._
4
+import scala.io._
5
+import scala.util.matching.Regex
6
+import scala.collection.mutable.ListBuffer
7
+
8
+// Import classes for MLLib
9
+import org.apache.spark.ml.classification.MultilayerPerceptronClassifier
10
+import org.apache.spark.ml.evaluation.MulticlassClassificationEvaluator
11
+import org.apache.spark.ml.classification.MultilayerPerceptronClassificationModel;
12
+
13
+// @Annotation
14
+// TODO: Re-Think how to classify tweets passed by parameter and how to return
15
+// the result. Modify directly the file (?)
16
+
17
+
18
+
19
+
20
+// MultilayerPerceptronClassifier
21
// MultilayerPerceptronClassifier-based relation classifier.
// Row i of the libsvm `input` file corresponds to line i of the `indexes`
// file, which stores the (id1, id2) tweet pair for that feature row.
class MPC(ss: SparkSession, input: String,
                            indexes: String,
                            model: String){

    // Load indexes: one "id1  id2" line per feature row.
    val text = ss.read.textFile(indexes)
    val pair_indexes = text.collect().map(parseTweetPair).toList
    // Load Data (features in LIBSVM format).
    val data = ss.read.format("libsvm").load(input)
    // Load the previously trained model.
    val mp_model = MultilayerPerceptronClassificationModel.load(model)
    // Classify every row once, eagerly, at construction time.
    val results = mp_model.transform(data)
    // Get Prediction column.
    val prediction_column = results.select("prediction")
    // Predicted class per row, aligned with pair_indexes.
    var predictions = prediction_column.collect.map(_.getDouble(0).toInt)

  /**
   * Relation predicted for the pair (id1, id2):
   * "attack", "support" or "noone" (also returned for unknown pairs).
   */
  def getRelationship(id1: Long, id2: Long): String = {
    val pair = TweetPair(id1.toString, id2.toString)
    val index = getIndexes(pair, pair_indexes)
    // BUG FIX: index 0 is a valid position; the original tested `index > 0`
    // and silently dropped the prediction of the first pair.
    val label = if (index >= 0) predictions(index) else 0
    getMeaningOfLabel(label)
  }

  /** Position of `pair` in `indexes`, or -1 when it is not present. */
  def getIndexes(pair: TweetPair, indexes: List[TweetPair]): Int =
    indexes.indexOf(pair)

  /** Maps a predicted class label to its relation name. */
  def getMeaningOfLabel(relation: Int): String =
    relation match {
      case 1 => "attack"
      case 2 => "support"
      case _ => "noone"
    }

  case class TweetPair(id1: String, id2: String)

  /** Parses one indexes line; fields are separated by TWO spaces. */
  def parseTweetPair(str: String): TweetPair = {
    val line = str.split("  ")
    TweetPair(line(0), line(1))
  }

}

+ 101
- 0
distributed_social_network_analyzer/src/main/scala/Learning/MP/MPTraining.scala Zobrazit soubor

@@ -0,0 +1,101 @@
1
+import org.apache.spark.{SparkConf, SparkContext}
2
+import org.apache.spark.sql.SparkSession
3
+import org.apache.spark.sql.DataFrame
4
+import scala.util.Random
5
+
6
+// Import classes for ML
7
+import org.apache.spark.ml.linalg.Vector
8
+import org.apache.spark.ml.classification.MultilayerPerceptronClassifier
9
+import org.apache.spark.ml.evaluation.MulticlassClassificationEvaluator
10
+import org.apache.spark.ml.classification.MultilayerPerceptronClassificationModel;
11
+
12
+// MultilayerPerceptronTraining
13
// MultilayerPerceptronTraining: trains and persists an MLP classifier over a
// LIBSVM data set and reports its test-set accuracy.
class MPT(ss: SparkSession, input: String) {

  // Load the data stored in LIBSVM format as a DataFrame.
  val data = ss.read.format("libsvm").load(input)

  /**
   * Training hyper-parameters. `layers` holds only the HIDDEN layer sizes;
   * setLayer() prepends the feature count and appends the class count.
   */
  case class Params(var training_size : Double = 0.75, var maxIters : Int = 100, var seed: Long = 1234L,
               var block: Int = 128, var layers : Array[Int] = Array.empty[Int])

  /**
   * Trains a MultilayerPerceptronClassifier with `in_params`, saves the model
   * (to "target/tmp/MPM" when `output_file` is empty) and returns the
   * accuracy measured on the held-out split.
   * NOTE: `in_params.layers` is mutated in place by setLayer().
   */
  def generateModel(in_params: Params, output_file: String = "",
                    random: Boolean = false): Double = {

    // Derive layer bounds from the data: feature count and 3 output classes.
    val numFeatures = data.first().getAs[Vector](1).size
    val classes = 3

    var params = in_params
    setLayer(params, numFeatures, classes)

    if (random)
      getRandomParams(params)

    // Split the data into train and test.
    val splits = data.randomSplit(Array(params.training_size, 1 - params.training_size))
    val (train, test) = (splits(0), splits(1))

    // Configure and fit the classifier.
    val trainer = new MultilayerPerceptronClassifier()
      .setLayers(params.layers)
      .setBlockSize(params.block)
      .setSeed(params.seed)
      .setMaxIter(params.maxIters)
    val model = trainer.fit(train)

    // Save the model, defaulting the target path when none was given.
    val savePath = if (output_file == "") "target/tmp/MPM" else output_file
    model.write.overwrite().save(savePath)

    getAccuracy(model, test)
  }

  /** Surrounds the hidden layers with the input-feature and output-class sizes. */
  def setLayer(params: Params, num_features : Int, output_classes: Int): Unit = {
    params.layers = Array(num_features) ++ params.layers ++ Array(output_classes)
  }

  // Dummy placeholder — parameter randomisation is not implemented yet.
  def getRandomParams(params: Params): Unit = {
    var i = 5
  }

  /**
   * Generates a random layer spec: [features, hidden..., classes].
   * NOTE(review): `nextInt(5)` then `+= 1` when non-zero yields 0 or 2..5
   * hidden layers (never exactly 1) — confirm this asymmetry is intended.
   */
  def getRandomLayer(num_features : Int, output_classes: Int): Array[Int] = {
    val r = Random
    var num_layers = r.nextInt(5)
    if (num_layers != 0)
      num_layers += 1
    val layers = new Array[Int](num_layers + 2)

    // Initial features.
    layers(0) = num_features

    // Output classes.
    layers(num_layers + 1) = output_classes

    // Hidden layers of 10..59 units.
    for (i <- 1 to num_layers)
      layers(i) = r.nextInt(50) + 10

    layers
  }

  /** Accuracy of `model` on the `test` DataFrame (multiclass metric). */
  def getAccuracy(model: MultilayerPerceptronClassificationModel,
                  test: DataFrame): Double = {
    val result = model.transform(test)
    val predictionAndLabels = result.select("prediction", "label")
    val evaluator = new MulticlassClassificationEvaluator()
      .setMetricName("accuracy")

    evaluator.evaluate(predictionAndLabels)
  }
}

+ 25
- 0
distributed_social_network_analyzer/src/main/scala/Learning/MachineLearning.scala Zobrazit soubor

@@ -0,0 +1,25 @@
1
+// import org.apache.spark.sql.SparkSession
2
+// import org.apache.spark.sql.DataFrame
3
+// import scala.collection.mutable.ListBuffer
4
+//
5
+//
6
+// object MachineLearning{
7
+//   def main(args: Array[String]) {
8
+//     val ss = SparkSession
9
+//       .builder
10
+//       .appName("Machine Learning")
11
+//       .getOrCreate()
12
+//
13
+//     //Execute methods
14
+//     val model = new MPT(ss, args(0))
15
+//     val accuracy = model.generateModel(new model.Params(), "target/tmp/MPM")
16
+//     println("DEBUG : ACCURACY => " + accuracy)
17
+//
18
+//     val classifier = new MPC(ss, args(0), args(1), "target/tmp/MPM")
19
+//     var relationShip = classifier.getRelationship(588748014438764544L, 588727284061863936L)
20
+//     println("DEBUG : LABEL => " + relationShip)
21
+//
22
+//     ss.stop()
23
+//   }
24
+//
25
+// }

+ 134
- 0
distributed_social_network_analyzer/src/main/scala/Learning/OtherModels/DecisionTreeModelCustom.scala Zobrazit soubor

@@ -0,0 +1,134 @@
1
+import org.apache.spark.{SparkConf, SparkContext}
2
+import org.apache.spark.sql.SparkSession
3
+import org.apache.spark.sql.DataFrame
4
+
5
+// Import classes for ML
6
+import org.apache.spark.ml.{Pipeline,PipelineModel}
7
+import org.apache.spark.ml.classification.DecisionTreeClassifier
8
+import org.apache.spark.ml.evaluation.MulticlassClassificationEvaluator
9
+import org.apache.spark.ml.feature.{IndexToString, StringIndexer, VectorIndexer}
10
+
11
+// DecisionTreeModelCustom
12
+object DTMC {
13
+
14
+	class Params(var input : String, var output : String, var maxDepth : Int,
15
+               var maxBins: Int) {
16
+    def this() {
17
+        this("", "", 8, 200);
18
+    }
19
+		override def toString : String =
20
+		      "Params: \n" +
21
+		      " - Input =    " + input + "\n" +
22
+		      " - Output =   " + output + "\n" +
23
+		      " - MaxDepth = " + maxDepth + "\n" +
24
+		      " - MaxBins =  " + maxBins + "\n"
25
+		  }
26
+
27
+	def main(args: Array[String]) {
28
+		val sc = SparkSession
29
+      .builder
30
+      .appName("Decision Tree Model")
31
+      .getOrCreate()
32
+
33
+		//Parse and Save Params
34
+		val arglist = args.toList
35
+		val params = new Params()
36
+		getOptions(arglist, params)
37
+
38
+		if(params.input == ""){
39
+			val msg = """Usage -> arguments: -arg value
40
+									argument: -input required""".stripMargin
41
+			println(msg)
42
+			sys.exit(1)
43
+		}
44
+
45
+		// Load the data stored in LIBSVM format as a DataFrame.
46
+    val data = sc.read.format("libsvm").load(params.input)
47
+
48
+		// Split the data into train and test
49
+    val splits = data.randomSplit(Array(0.75, 0.25))
50
+    val (train, test) = (splits(0), splits(1))
51
+
52
+		// Index labels, adding metadata to the label column.
53
+		// Fit on whole dataset to include all labels in index.
54
+		val labelIndexer = new StringIndexer()
55
+		  .setInputCol("label")
56
+		  .setOutputCol("indexedLabel")
57
+		  .fit(data)
58
+		// Automatically identify categorical features, and index them.
59
+		val featureIndexer = new VectorIndexer()
60
+		  .setInputCol("features")
61
+		  .setOutputCol("indexedFeatures")
62
+		  .setMaxCategories(350) // features with > 350 distinct values are treated as continuous.
63
+		  .fit(data)
64
+
65
+		// Train a DecisionTree model.
66
+		val dt = new DecisionTreeClassifier()
67
+		  .setLabelCol("indexedLabel")
68
+		  .setFeaturesCol("indexedFeatures")
69
+			.setFeaturesCol("indexedFeatures")
70
+			.setMaxDepth(params.maxDepth)
71
+			.setMaxBins(params.maxBins)
72
+			// .setMinInstancesPerNode(params.minInstancesPerNode)
73
+			// .setMinInfoGain(params.minInfoGain)
74
+			// .setCacheNodeIds(params.cacheNodeIds)
75
+			// .setCheckpointInterval(params.checkpointInterval)
76
+
77
+		// Convert indexed labels back to original labels.
78
+		val labelConverter = new IndexToString()
79
+		  .setInputCol("prediction")
80
+		  .setOutputCol("predictedLabel")
81
+		  .setLabels(labelIndexer.labels)
82
+
83
+		// Chain indexers and tree in a Pipeline.
84
+		val pipeline = new Pipeline()
85
+		  .setStages(Array(labelIndexer, featureIndexer, dt, labelConverter))
86
+
87
+		// Train model. This also runs the indexers.
88
+		val model = pipeline.fit(train)
89
+
90
+    // Get Accuracy
91
+		val accuracy = getAccuracy(model, test)
92
+		println("Accuracy: " + accuracy)
93
+
94
+		// Save model
95
+    if(params.output == "")
96
+      model.write.overwrite().save("target/tmp/DTM")
97
+    else
98
+      model.write.overwrite().save(params.output)
99
+
100
+		sc.stop()
101
+  	}
102
+
103
+		def getOptions(list: List[String], params: Params){
104
+      def isSwitch(s : String) = (s(0) == '-')
105
+      list match {
106
+        case Nil =>
107
+        case "-input"  :: value :: tail => params.input = value.toString
108
+                                           getOptions(tail, params)
109
+        case "-output" :: value :: tail => params.output = value.toString
110
+                                           getOptions(tail, params)
111
+        case "-maxd"    :: value :: tail => params.maxDepth = value.toInt
112
+                                           getOptions(tail, params)
113
+				case "-maxb"    :: value :: tail => params.maxBins = value.toInt
114
+																	         getOptions(tail, params)
115
+        case option :: tail => println("Unknown option " + option)
116
+                               sys.exit(1)
117
+      }
118
+
119
+    }
120
+
121
+		//Auxiliary Function: returns accuracy of a given model with a data set
122
+    def getAccuracy(model: PipelineModel,
123
+                      test: DataFrame): Double = {
124
+      //compute accuracy on the test set
125
+      val result = model.transform(test)
126
+      val predictionAndLabels = result.select("prediction", "label")
127
+      val evaluator = new MulticlassClassificationEvaluator()
128
+        .setMetricName("accuracy")
129
+
130
+      val accuracy = evaluator.evaluate(predictionAndLabels)
131
+      return accuracy
132
+    }
133
+
134
+}

+ 115
- 0
distributed_social_network_analyzer/src/main/scala/Learning/OtherModels/Examples/ExampleFlights.scala Zobrazit soubor

@@ -0,0 +1,115 @@
1
+import org.apache.spark.{SparkConf, SparkContext}
2
+
3
+import org.apache.spark.rdd.RDD
4
+
5
+// Import classes for MLLib
6
+import org.apache.spark.mllib.regression.LabeledPoint
7
+import org.apache.spark.mllib.linalg.Vectors
8
+import org.apache.spark.mllib.tree.DecisionTree
9
+import org.apache.spark.mllib.tree.model.DecisionTreeModel
10
+import org.apache.spark.mllib.util.MLUtils
11
+
12
+object ExampleFlights {
13
+
14
+	def main(args: Array[String]) {
15
+		val conf = new SparkConf().setAppName("EF")
16
+		val sc = new SparkContext(conf)
17
+
18
+		//Load Data into a RDD, modify route to required
19
+		val textRDD = sc.textFile("flights.csv")
20
+
21
+		//Parse the RDD of csv lines into an RDD of flight classes
22
+		val flightsRDD = textRDD.map(parseFlight).cache()
23
+		cPrint(flightsRDD.first().toString)
24
+
25
+		// EXTRACT FEATURES: map string city to int
26
+		var cityMap: Map[String, Int] = Map()
27
+		var index: Int = 0
28
+		flightsRDD.map(flight => flight.src).distinct.collect.foreach(
29
+					   x => { cityMap += (x -> index); index += 1 })
30
+		cPrint(cityMap.toString)
31
+
32
+		// DEFINE FEATURES ARRAY
33
+		val mlprep = flightsRDD.map(flight => {
34
+					  val src = cityMap(flight.src) // category
35
+					  val dest = cityMap(flight.dest) // category
36
+					  val id = flight.id.toInt
37
+					  val price = flight.price.toInt
38
+					  val expensive = if (flight.price.toInt > 500) 1 else 0
39
+					  Array(expensive.toInt, src.toInt, dest.toInt, id.toInt)
40
+					})
41
+		println(mlprep.take(5).deep.mkString("\n"))
42
+
43
+		// CREATE LABELED POINTS
44
+		val mldata = mlprep.map(x => LabeledPoint(x(0), Vectors.dense(x(1), x(2), x(3))))
45
+		println(mldata.take(5).deep.mkString("\n"))
46
+
47
+		// SPLIT DATA: TRAINING SET - TEST SET
48
+		// Flights with price < 500 -> get 50%
49
+		val mldata0 = mldata.filter(x => x.label == 0.0).randomSplit(Array(0.50, 0.50))(1)
50
+		// Flights with price > 500 -> get 50%
51
+		val mldata1 = mldata.filter(x => x.label == 1.0).randomSplit(Array(0.50, 0.50))(1)
52
+		// Flights selected
53
+		//val mldata2 = mldata0 ++ mldata1
54
+		val mldata2 = mldata
55
+
56
+		//Split mldata2 into training and test data
57
+		val splits = mldata2.randomSplit(Array(0.3, 0.7))
58
+		val (trainingData, testData) = (splits(0), splits(1))
59
+
60
+		// INFO ABOUT FEATURES
61
+		var categoricalFeaturesInfo = Map[Int, Int]()
62
+		categoricalFeaturesInfo += (0 -> cityMap.size) //src
63
+		categoricalFeaturesInfo += (1 -> cityMap.size) //dest
64
+
65
+
66
+		// VALUES OF MODEL
67
+		val numClasses = 2
68
+		// Defining values for the other parameters
69
+		val impurity = "gini"
70
+		val maxDepth = 10
71
+		val maxBins = 5000
72
+
73
+		// Call DecisionTree trainClassifier with the trainingData , which returns the model
74
+		val model = DecisionTree.trainClassifier(trainingData, numClasses,
75
+			categoricalFeaturesInfo, impurity, maxDepth, maxBins)
76
+		model.toDebugString
77
+
78
+		//val labelAndPreds = testData.map { point =>
79
+		val labelAndPreds = testData.map { point =>
80
+			val prediction = model.predict(point.features)
81
+			(point.label, prediction)
82
+			}
83
+
84
+		val goodPrediction =(labelAndPreds.filter{
85
+			case (label, prediction) => ( label == prediction)
86
+			})
87
+		val wrongPrediction =(labelAndPreds.filter{
88
+			case (label, prediction) => ( label != prediction)
89
+			})
90
+		println(labelAndPreds.take(10000).deep.mkString("\n"))
91
+
92
+		val good = goodPrediction.count()
93
+		val wrong = wrongPrediction.count()
94
+		val ratioWrong=wrong.toDouble/testData.count()
95
+
96
+		val results = "Good Prediction : " + good.toString + "\n" +
97
+					  "Wrong Prediction: " + wrong.toString + "\n" +
98
+					  "Ratio Wrong     : " + ratioWrong.toString + "\n"
99
+		cPrint(results)
100
+
101
+		sc.stop()
102
+  	}
103
+
104
+	case class Flight(src: String, dest: String, id: Int, price: Int)
105
+
106
+	def parseFlight(str: String): Flight = {
107
+		val line = str.split(",")
108
+		Flight(line(0), line(1), line(2).toInt, line(3).toInt)
109
+	}
110
+
111
+	def cPrint(str: String) {
112
+		val output = "\n" + str + "\n\n"
113
+		print(output)
114
+	}
115
+}

+ 111
- 0
distributed_social_network_analyzer/src/main/scala/Learning/OtherModels/NaiveBayesModelCustom.scala Zobrazit soubor

@@ -0,0 +1,111 @@
1
+import org.apache.spark.{SparkConf, SparkContext}
2
+import org.apache.spark.sql.SparkSession
3
+import org.apache.spark.sql.DataFrame
4
+
5
+// Import classes for ML
6
+import org.apache.spark.ml.classification.NaiveBayes
7
+import org.apache.spark.ml.classification.NaiveBayesModel
8
+import org.apache.spark.ml.evaluation.MulticlassClassificationEvaluator
9
+
10
+object NBMC {
11
+
12
+  class Params(var input : String, var output : String, var maxIters : Int,
13
+               var block: Int, var seed: Long, var layers : Array[Int]) {
14
+    def this() {
15
+        this("", "", 100, 128, 1234L, Array[Int]());
16
+    }
17
+    override def toString : String =
18
+      "Params: \n" +
19
+      " - Input =    " + input + "\n" +
20
+      " - Output =   " + output + "\n" +
21
+      " - MaxIters = " + maxIters + "\n" +
22
+      " - Block =    " + block + "\n" +
23
+      " - Seed =     " + seed + "\n" +
24
+      " - Layers =   " + layers + "\n"
25
+
26
+  }
27
+
28
+  def main(args: Array[String]) {
29
+    val sc = SparkSession
30
+      .builder
31
+      .appName("NaiveBayes Model")
32
+      .getOrCreate()
33
+
34
+    //Parse and Save Params
35
+    val arglist = args.toList
36
+    val params = new Params()
37
+    getOptions(arglist, params)
38
+
39
+    if(params.input == ""){
40
+      val msg = """Usage -> arguments: -arg value
41
+                  argument: -input required""".stripMargin
42
+      println(msg)
43
+      sys.exit(1)
44
+    }
45
+
46
+    // Load the data stored in LIBSVM format as a DataFrame.
47
+    val data = sc.read.format("libsvm").load(params.input)
48
+
49
+    // Split the data into train and test
50
+    val splits = data.randomSplit(Array(0.75, 0.25))
51
+    val (train, test) = (splits(0), splits(1))
52
+
53
+    // Parse params
54
+    parseParams(params)
55
+
56
+    // Train a NaiveBayes model.
57
+    val model = new NaiveBayes()
58
+      .fit(train)
59
+
60
+    // Get Accuracy
61
+    val accuracy = getAccuracy(model, test)
62
+    printf("Accuracy: " + accuracy + "\n")
63
+
64
+    // Save model
65
+    if(params.output == "")
66
+      model.write.overwrite().save("target/tmp/MPM")
67
+    else
68
+      model.write.overwrite().save(params.output)
69
+
70
+    sc.stop()
71
+    }
72
+
73
+    def getOptions(list: List[String], params: Params){
74
+      def isSwitch(s : String) = (s(0) == '-')
75
+      list match {
76
+        case Nil =>
77
+        case "-input"  :: value :: tail => params.input = value.toString
78
+                                           getOptions(tail, params)
79
+        case "-output" :: value :: tail => params.output = value.toString
80
+                                           getOptions(tail, params)
81
+        case "-max"    :: value :: tail => params.maxIters = value.toInt
82
+                                           getOptions(tail, params)
83
+        case "-block"  :: value :: tail => params.block = value.toInt
84
+                                           getOptions(tail, params)
85
+        case "-seed"   :: value :: tail => params.seed = value.toLong
86
+                                           getOptions(tail, params)
87
+        case option :: tail => println("Unknown option " + option)
88
+                               sys.exit(1)
89
+      }
90
+
91
+    }
92
+
93
+    def parseParams(params: Params){
94
+      //seed
95
+      params.seed = System.currentTimeMillis()
96
+    }
97
+
98
+
99
+    //Auxiliary Function: returns accuracy of a given model with a data set
100
+    def getAccuracy(model: NaiveBayesModel,
101
+                      test: DataFrame): Double = {
102
+      //compute accuracy on the test set
103
+      val result = model.transform(test)
104
+      val predictionAndLabels = result.select("prediction", "label")
105
+      val evaluator = new MulticlassClassificationEvaluator()
106
+        .setMetricName("accuracy")
107
+
108
+      val accuracy = evaluator.evaluate(predictionAndLabels)
109
+      return accuracy
110
+    }
111
+}

+ 37
- 0
distributed_social_network_analyzer/src/main/scala/MainApp.scala Zobrazit soubor

@@ -0,0 +1,37 @@
1
+//SparkContext
2
+import org.apache.spark.sql.SparkSession
3
+import org.apache.spark.sql.DataFrame
4
+import scala.collection.mutable.ListBuffer
5
+import org.apache.spark.SparkContext
6
+import org.apache.spark.SparkConf
7
+
8
+//GraphX TODO: TEST ONLY
9
+import org.apache.spark._
10
+import org.apache.spark.graphx._
11
+import org.apache.spark.rdd.RDD
12
+
13
+//Lists
14
+import scala.collection.mutable.ListBuffer
15
+
16
+//Read files
17
+import scala.io.Source
18
+
19
+object MainApp{
20
+  def main(args: Array[String]){
21
+    val ss = SparkSession
22
+      .builder
23
+      .appName("Main Application")
24
+      .getOrCreate()
25
+    val sc = ss.sparkContext
26
+    val path = "Storage/Jsons/" + args(0) + ".json"
27
+    //Create MPC
28
+    val parserCaller = new ParserCaller(ss)
29
+    val classifier = parserCaller.parseJson(args(0))
30
+    //Create graph
31
+    val graph = new TwitterGraph(sc, classifier)
32
+    graph.parseConversationFromFile(path)
33
+    val calcs : RelationshipsCalcs = new RelationshipsCalcs()
34
+    calcs.optimizedCalcDefeatersMessageCount(graph)
35
+    ss.stop()
36
+  }
37
+}

+ 49
- 0
distributed_social_network_analyzer/src/main/scala/MainAppFromXML.scala Zobrazit soubor

@@ -0,0 +1,49 @@
1
+//SparkContext
2
+import org.apache.spark.sql.SparkSession
3
+import org.apache.spark.sql.DataFrame
4
+import scala.collection.mutable.ListBuffer
5
+import org.apache.spark.SparkContext
6
+import org.apache.spark.SparkConf
7
+import org.apache.log4j._
8
+
9
+//GraphX TODO: TEST ONLY
10
+import org.apache.spark._
11
+import org.apache.spark.graphx._
12
+import org.apache.spark.rdd.RDD
13
+
14
+//Lists
15
+import scala.collection.mutable.ListBuffer
16
+
17
+//Read files
18
+import scala.io.Source
19
+
20
+import scala.xml.XML
21
+
22
+// <ArgumentationAnswers ArgumentationProblemInstance=name
23
+//   Semantics="VAFideal"><answer><arg id="588729571383574528" /></answer>></ArgumentationAnswers>
24
+
25
+
26
+object MainAppFromXML{
27
+  def main(args: Array[String]){
28
+    val ss = SparkSession
29
+      .builder
30
+      .appName("Main Application")
31
+      .getOrCreate()
32
+    val sc = ss.sparkContext
33
+    // <ArgumentationAnswers><answer> </answer></ArgumentationAnswers>
34
+
35
+    LogManager.getLogger("org").setLevel(Level.OFF)
36
+    val path = args(0) + ".xml"
37
+    //Create MPC
38
+    // val parserCaller = new ParserCaller(ss)
39
+    // val classifier = parserCaller.parseJson(args(0))
40
+    // val nullclassifier = new MPC(ss)
41
+    //Create graph
42
+    val graph = new TwitterGraphFromXML( sc )
43
+    graph.parseXMLLabelledConversation( path )
44
+    val calcs : RelationshipsCalcs = new RelationshipsCalcs()
45
+    val xmlsolution = calcs.optimizedCalcDefeatersMessageCount(graph)
46
+    ss.stop()
47
+    XML.save(args(0)+"-xml.sol",xmlsolution)
48
+  }
49
+}

+ 22
- 0
distributed_social_network_analyzer/src/main/scala/ParserCaller/ParserCaller.scala Zobrazit soubor

@@ -0,0 +1,22 @@
1
+//SparkContext
2
+import org.apache.spark.sql.SparkSession
3
+import org.apache.spark.sql.DataFrame
4
+import scala.collection.mutable.ListBuffer
5
+import org.apache.spark.SparkContext
6
+import org.apache.spark.SparkConf
7
+
8
+class ParserCaller (session: SparkSession){
9
+  def parseJson(id: String) : MPC = {
10
+    val context = session.sparkContext
11
+    val indexes = "Storage/Files/pairs-" + id + ".txt"
12
+    val input = "Storage/Files/attributes-" + id + ".txt"
13
+    val model = "Storage/Models/MPM"
14
+    // val scriptPath = "parser/parser_all.py"
15
+    // val scriptName = "parser_all.py"
16
+    // context.addFile(scriptPath)
17
+    // val data = context.parallelize(List(json_file))
18
+    // val pipeRDD = data.pipe(scriptPath)
19
+    // pipeRDD.collect()
20
+    return new MPC(session, input, indexes, model)
21
+  }
22
+}

+ 226
- 0
distributed_social_network_analyzer/src/main/scala/Tests/BasicTests.scala Zobrazit soubor

@@ -0,0 +1,226 @@
1
+//SparkContext & Session
2
+import org.apache.spark.sql.SparkSession
3
+import org.apache.spark.SparkContext
4
+import org.apache.spark.SparkConf
5
+
6
+//GraphX TODO: TEST ONLY
7
+import org.apache.spark._
8
+import org.apache.spark.graphx._
9
+import org.apache.spark.rdd.RDD
10
+
11
+//Lists
12
+import scala.collection.mutable.ListBuffer
13
+
14
+//Read files
15
+import scala.io.Source
16
+
17
+class BasicTests(session: SparkSession){
18
+
19
+  val context = session.sparkContext
20
+
21
+  def doAllTests(){
22
+    graphParsingTest()
23
+    graphCreatingTest()
24
+    parseSingleTweetTest()
25
+    parseTweetConversation()
26
+    multireferenceTweetConversation()
27
+  }
28
+
29
+  def graphParsingTest(){
30
+    val jGraph = new SocialGraph(context)
31
+
32
+    //Make the connections, both systems are equivalent
33
+
34
+    jGraph.parseConnection("{\"origin\":1," +
35
+                            "\"destination\":2," +
36
+                            "\"intention\":\"noone\"}")
37
+    jGraph.parseConnection("{\"origin\":1," +
38
+                            "\"destination\":3," +
39
+                            "\"intention\":\"noone\"}")
40
+
41
+    //Make the messages identifier, author and Json with properties)
42
+
43
+    jGraph.parseMessage("{\"id\":1," +
44
+                         "\"weight\":1," +
45
+                         "\"author\":\"author1\"," +
46
+                         "\"message\":\"content1\"," +
47
+                         "\"properties\":{\"property\":\"something\"}}")
48
+    jGraph.parseMessage("{\"id\":2," +
49
+                         "\"weight\":1," +
50
+                         "\"author\":\"author2\"," +
51
+                         "\"message\":\"content1\"," +
52
+                         "\"properties\":{\"property\":\"something\"}}")
53
+    jGraph.parseMessage("{\"id\":3," +
54
+                         "\"weight\":1," +
55
+                         "\"author\":\"author3\"," +
56
+                         "\"message\":\"content2\"," +
57
+                         "\"properties\":{\"property\":\"something\"}}")
58
+
59
+    jGraph.makeGraph()
60
+
61
+    val graph = jGraph.getGraph()
62
+
63
+    val contentCount = graph.vertices.filter{
64
+      case(id, message) => message.message == "content1"
65
+    }.count
66
+
67
+    val relationToRootCount = graph.edges.filter(e => e.srcId < e.dstId).count
68
+
69
+
70
+    println("Total messages with content 1: " + contentCount + "\n" +
71
+      "Total edges going to higher node id: " + relationToRootCount)
72
+  }
73
+
74
+  def listSubGraphTriplets(){
75
+    val jGraph = new SocialGraph(context)
76
+    jGraph.addConnection(2, 1, "attack")
77
+    jGraph.addConnection(3, 2, "support")
78
+    jGraph.addConnection(4, 1, "support")
79
+    jGraph.addConnection(5, 4, "attack")
80
+    jGraph.addConnection(6, 4, "noone")
81
+    val someMap : Map[String, Any] = Map("Something" -> "Content")
82
+    jGraph.addMessage(1, "author1", "content1", someMap, 1)
83
+    jGraph.addMessage(2, "author2", "content2", someMap, 2)
84
+    jGraph.addMessage(3, "author3", "content3", someMap, 3)
85
+    jGraph.addMessage(4, "author4", "content4", someMap, 1)
86
+    jGraph.addMessage(5, "author5", "content5", someMap, 2)
87
+    jGraph.addMessage(6, "author6", "content6", someMap, 3)
88
+
89
+    val graph = jGraph.getSubGraph(true, false, false).getGraph()
90
+    val facts: RDD[String] =
91
+    graph.triplets.map(triplet =>
92
+    triplet.srcAttr.author + " is " + triplet.attr + " with " + triplet.dstAttr.author)
93
+    facts.collect.foreach(println(_))
94
+  }
95
+
96
+  def graphCreatingTest(){
97
+    val jGraph = new SocialGraph(context)
98
+    jGraph.addConnection(1, 2, "noone")
99
+    jGraph.addConnection(1, 3, "noone")
100
+    val someMap : Map[String, Any] = Map("Something" -> "Content")
101
+    jGraph.addMessage(1, "author1", "content1", someMap, 1)
102
+    jGraph.addMessage(2, "author2", "content2", someMap, 2)
103
+    jGraph.addMessage(3, "author3", "content3", someMap, 3)
104
+
105
+    jGraph.makeGraph()
106
+
107
+    val graph = jGraph.getGraph()
108
+
109
+    val contentCount = graph.vertices.filter{
110
+      case(id, message) => message.message == "content1"
111
+    }.count
112
+
113
+    val relationToRootCount = graph.edges.filter(e => e.srcId < e.dstId).count
114
+
115
+
116
+    println("Total messages with content 1: " + contentCount + "\n" +
117
+      "Total edges going to higher node id: " + relationToRootCount)
118
+  }
119
+
120
+  def parseSingleTweetTest(){
121
+
122
+    //Create MPC
123
+    val path = "Examples/JsonSingleTweet.txt"
124
+    val parserCaller = new ParserCaller(session)
125
+    val classifier = parserCaller.parseJson(path)
126
+    //Create graph
127
+    val tGraph = new TwitterGraph(context, classifier)
128
+    tGraph.parseConversationFromFile(path)
129
+    val line = Source.fromFile(path).getLines().next()
130
+    println("Let's parse the next tweet: \n" + line)
131
+    tGraph.parseTweet(line)
132
+  }
133
+
134
+  def parseTweetConversation(){
135
+    //Create MPC
136
+    val path = "Examples/JsonConversationTweets.txt"
137
+    val parserCaller = new ParserCaller(session)
138
+    val classifier = parserCaller.parseJson(path)
139
+    //Create graph
140
+    val tGraph = new TwitterGraph(context, classifier)
141
+    tGraph.parseConversationFromFile(path)
142
+  }
143
+
144
+  def multireferenceTweetConversation(){
145
+    println("[TEST]: Let's parse MultiReferenceConversation")
146
+
147
+    //Create MPC
148
+    val path = "Examples/JsonMultireferenceConversation.txt"
149
+    val parserCaller = new ParserCaller(session)
150
+    val classifier = parserCaller.parseJson(path)
151
+    //Create graph
152
+    val tGraph = new TwitterGraph(context, classifier)
153
+    tGraph.parseConversationFromFile(path)
154
+  }
155
+
156
+  def parseRedditConversation(){
157
+    println("[TEST]: Let's parse Reddit conversation from file")
158
+    val rGraph = new RedditGraph(context)
159
+    val filePath = "Examples/FakeRedditConversation.txt"
160
+    rGraph.parseRedditConversationFromFile(filePath)
161
+  }
162
+
163
+  def updatePunctuation(){
164
+
165
+    val jGraph = new SocialGraph(context)
166
+
167
+    //Make the connections, both systems are equivalent
168
+
169
+    jGraph.parseConnection("{\"origin\":1," +
170
+                            "\"destination\":2," +
171
+                            "\"intention\":\"noone\"}")
172
+    jGraph.parseConnection("{\"origin\":1," +
173
+                            "\"destination\":3," +
174
+                            "\"intention\":\"noone\"}")
175
+
176
+    //Make the messages identifier, author and Json with properties)
177
+
178
+    jGraph.parseMessage("{\"id\":1," +
179
+                         "\"weight\":1," +
180
+                         "\"author\":\"author1\"," +
181
+                         "\"message\":\"content1\"," +
182
+                         "\"properties\":{\"property\":\"something\"}}")
183
+    jGraph.parseMessage("{\"id\":2," +
184
+                         "\"weight\":1," +
185
+                         "\"author\":\"author2\"," +
186
+                         "\"message\":\"content1\"," +
187
+                         "\"properties\":{\"property\":\"something\"}}")
188
+    jGraph.parseMessage("{\"id\":3," +
189
+                         "\"weight\":1," +
190
+                         "\"author\":\"author3\"," +
191
+                         "\"message\":\"content2\"," +
192
+                         "\"properties\":{\"property\":\"something\"}}")
193
+
194
+    jGraph.makeGraph()
195
+
196
+    val graph = jGraph.getGraph()
197
+
198
+    val contentCount = graph.vertices.filter{
199
+      case(id, message) => message.message == "content1"
200
+    }.count
201
+
202
+    val relationToRootCount = graph.edges.filter(e => e.srcId < e.dstId).count
203
+
204
+
205
+    println("Total messages with content 1: " + contentCount + "\n" +
206
+      "Total edges going to higher node id: " + relationToRootCount)
207
+
208
+    val facts: RDD[String] =
209
+    graph.triplets.map(triplet =>
210
+    triplet.srcAttr.author + " with " + triplet.srcAttr.weight + " is " + triplet.attr + " with " + triplet.dstAttr.author + " with " + triplet.dstAttr.weight)
211
+
212
+    //Update values
213
+    jGraph.updatePunctuation(1, 4)
214
+    jGraph.makeGraph()
215
+    val newFacts: RDD[String] =
216
+    jGraph.getGraph().triplets.map(triplet =>
217
+    triplet.srcAttr.author + " with " + triplet.srcAttr.weight + " is " + triplet.attr + " with " + triplet.dstAttr.author + " with " + triplet.dstAttr.weight)
218
+
219
+    println("Old values...")
220
+    facts.collect.foreach(println(_))
221
+    println("New values...")
222
+    newFacts.collect.foreach(println(_))
223
+
224
+  }
225
+
226
+}

+ 185
- 0
distributed_social_network_analyzer/src/main/scala/Tests/DistributedGraphTests.scala Zobrazit soubor

@@ -0,0 +1,185 @@
1
+//SparkContext
2
+import org.apache.spark.sql.SparkSession
3
+import org.apache.spark.SparkContext
4
+import org.apache.spark.SparkConf
5
+
6
+//GraphX TODO: TEST ONLY
7
+import org.apache.spark._
8
+import org.apache.spark.graphx._
9
+import org.apache.spark.rdd.RDD
10
+
11
+//Lists
12
+import scala.collection.mutable.ListBuffer
13
+
14
+//Read files
15
+import scala.io.Source
16
+import sys.process._
17
+import java.io._
18
+
19
+class DistributedGraphTests (session: SparkSession){
20
+
21
+  val context = session.sparkContext
22
+
23
+  def listTriplets(){
24
+    val testGraph : TwitterGraph = getMultiReferenceConversationGraph()
25
+    val graph = testGraph.getGraph()
26
+    val facts: RDD[String] =
27
+    graph.triplets.map(triplet =>
28
+    triplet.srcAttr.author + " is " + triplet.attr + " with " + triplet.dstAttr.author)
29
+    facts.collect.foreach(println(_))
30
+  }
31
+
32
+  def twitterDistanceTest(){
33
+    println("[DISTRIBUTED_TEST]: Pregel test")
34
+    val testGraph : TwitterGraph = getMultiReferenceConversationGraph()
35
+    println("Max distance of the graph is " + testGraph.maxDistance()
36
+         +"\nMin distance of the graph is " + testGraph.minDistance()
37
+         +"\nAverage distance is " + testGraph.averageDistance())
38
+  }
39
+
40
+  def distancesTest(){
41
+    //Create a graph with min depth 2 and max depth 3
42
+    val sGraph : SocialGraph = new SocialGraph(context)
43
+    val someMap : Map[String, Any] = Map("Something" -> "Content")
44
+    sGraph.addMessage(1, "author1", "content1", someMap)
45
+    sGraph.addMessage(2, "author2", "content2", someMap)
46
+    sGraph.addMessage(3, "author3", "content3", someMap)
47
+    sGraph.addMessage(4, "author4", "content4", someMap)
48
+    sGraph.addMessage(5, "author5", "content5", someMap)
49
+    sGraph.addMessage(6, "author6", "content6", someMap)
50
+    sGraph.addMessage(7, "author7", "content7", someMap)
51
+    sGraph.addMessage(8, "author8", "content8", someMap)
52
+
53
+    sGraph.addConnection(2, 1, "noone")
54
+    sGraph.addConnection(3, 1, "noone")
55
+    sGraph.addConnection(4, 2, "noone")
56
+    sGraph.addConnection(5, 2, "noone")
57
+    sGraph.addConnection(6, 3, "noone")
58
+    sGraph.addConnection(7, 3, "noone")
59
+    sGraph.addConnection(8, 5, "noone")
60
+
61
+    println("Min distance of the graph is " + sGraph.minDistance()
62
+         +"\nMax distance of the graph is " + sGraph.maxDistance()
63
+         +"\nAverage distance is " + sGraph.averageDistance())
64
+  }
65
+
66
+  def distancesTestAlt(){
67
+    //Create a graph with min depth 2 and max depth 3
68
+    val sGraph : SocialGraph = new SocialGraph(context)
69
+    val someMap : Map[String, Any] = Map("Something" -> "Content")
70
+    sGraph.addMessage(1,  "author1",  "content1",  someMap)
71
+    sGraph.addMessage(2,  "author2",  "content2",  someMap)
72
+    sGraph.addMessage(3,  "author3",  "content3",  someMap)
73
+    sGraph.addMessage(4,  "author4",  "content4",  someMap)
74
+    sGraph.addMessage(5,  "author5",  "content5",  someMap)
75
+    sGraph.addMessage(6,  "author6",  "content6",  someMap)
76
+    sGraph.addMessage(7,  "author7",  "content7",  someMap)
77
+    sGraph.addMessage(8,  "author8",  "content8",  someMap)
78
+    sGraph.addMessage(9,  "author9",  "content9",  someMap)
79
+    sGraph.addMessage(10, "author10", "content10", someMap)
80
+
81
+    sGraph.addConnection(2,  1, "noone")
82
+    sGraph.addConnection(3,  1, "noone")
83
+    sGraph.addConnection(4,  2, "noone")
84
+    sGraph.addConnection(5,  2, "noone")
85
+    sGraph.addConnection(6,  2, "noone")
86
+    sGraph.addConnection(7,  5, "noone")
87
+    sGraph.addConnection(8,  5, "noone")
88
+    sGraph.addConnection(9,  3, "noone")
89
+    sGraph.addConnection(10, 3, "noone")
90
+
91
+
92
+    println("Min distance of the graph is " + sGraph.minDistance()
93
+         +"\nMax distance of the graph is " + sGraph.maxDistance()
94
+         +"\nAverage distance is " + sGraph.averageDistance())
95
+  }
96
+
97
+  def defeatersCountTest(){
98
+    //Create a graph with min depth 2 and max depth 3
99
+    val sGraph : SocialGraph = new SocialGraph(context)
100
+    val someMap : Map[String, Any] = Map("Something" -> "Content")
101
+    sGraph.addMessage(1,  "author1",  "content1",  someMap)
102
+    sGraph.addMessage(2,  "author2",  "content2",  someMap)
103
+    sGraph.addMessage(3,  "author3",  "content3",  someMap)
104
+    sGraph.addMessage(4,  "author4",  "content4",  someMap)
105
+    sGraph.addMessage(5,  "author5",  "content5",  someMap)
106
+    sGraph.addMessage(6,  "author6",  "content6",  someMap)
107
+    sGraph.addMessage(7,  "author7",  "content7",  someMap)
108
+    sGraph.addMessage(8,  "author8",  "content8",  someMap)
109
+    sGraph.addMessage(9,  "author9",  "content9",  someMap)
110
+    sGraph.addMessage(10, "author10", "content10", someMap)
111
+
112
+    sGraph.addConnection(2,  1, "noone")
113
+    sGraph.addConnection(3,  1, "noone")
114
+    sGraph.addConnection(4,  2, "noone")
115
+    sGraph.addConnection(5,  2, "noone")
116
+    sGraph.addConnection(6,  2, "noone")
117
+    sGraph.addConnection(7,  5, "noone")
118
+    sGraph.addConnection(8,  5, "noone")
119
+    sGraph.addConnection(9,  3, "noone")
120
+    sGraph.addConnection(10, 3, "noone")
121
+
122
+    val calcs: RelationshipsCalcs = new RelationshipsCalcs()
123
+    calcs.calcDefeaters(sGraph)
124
+  }
125
+
126
+  def optimizedDefeatersCountTest(){
127
+    //Create a graph with min depth 2 and max depth 3
128
+    val sGraph : SocialGraph = new SocialGraph(context)
129
+    val someMap : Map[String, Any] = Map("Something" -> "Content")
130
+    sGraph.addMessage(1,  "author1",  "content1",  someMap)
131
+    sGraph.addMessage(2,  "author2",  "content2",  someMap)
132
+    sGraph.addMessage(3,  "author3",  "content3",  someMap)
133
+    sGraph.addMessage(4,  "author4",  "content4",  someMap)
134
+    sGraph.addMessage(5,  "author5",  "content5",  someMap)
135
+    sGraph.addMessage(6,  "author6",  "content6",  someMap)
136
+    sGraph.addMessage(7,  "author7",  "content7",  someMap)
137
+    sGraph.addMessage(8,  "author8",  "content8",  someMap)
138
+    sGraph.addMessage(9,  "author9",  "content9",  someMap)
139
+    sGraph.addMessage(10, "author10", "content10", someMap)
140
+
141
+    sGraph.addConnection(2,  1, "noone")
142
+    sGraph.addConnection(3,  1, "noone")
143
+    sGraph.addConnection(4,  2, "noone")
144
+    sGraph.addConnection(5,  2, "noone")
145
+    sGraph.addConnection(6,  2, "noone")
146
+    sGraph.addConnection(7,  5, "noone")
147
+    sGraph.addConnection(8,  5, "noone")
148
+    sGraph.addConnection(9,  3, "noone")
149
+    sGraph.addConnection(10, 3, "noone")
150
+
151
+    val calcs: RelationshipsCalcs = new RelationshipsCalcs()
152
+    calcs.optimizedCalcDefeaters(sGraph)
153
+  }
154
+
155
+  def downloadConversation(){
156
+    val testGraph : TwitterGraph = getMultiReferenceConversationGraph()
157
+  }
158
+
159
+  def downloadRedditConversation(){
160
+    val downloader = new RedditDownloader(context)
161
+    val testGraph : RedditGraph = downloader.getGraphFromConversationId("63w4sq","Downloaders/reddit-collector/reddit.json")
162
+  }
163
+
164
+  private def getMultiReferenceConversationGraph(): TwitterGraph = {
165
+    storedTestGraph match {
166
+      case Some(i) => i
167
+      case None =>  {
168
+        val downloader = new TwitterDownloader(session.sparkContext)
169
+        val path = downloader.downloadAndStoreConversation("574324656905281538")
170
+    
171
+        //Create MPC
172
+        val parserCaller = new ParserCaller(session)
173
+        val classifier = parserCaller.parseJson(path)
174
+        //Create graph
175
+        val testGraph = new TwitterGraph(session.sparkContext, classifier)
176
+        testGraph.parseConversationFromFile(path)
177
+        storedTestGraph = Some(testGraph)
178
+        testGraph
179
+      }
180
+    }
181
+  }
182
+
183
+  var storedTestGraph : Option[TwitterGraph] = None
184
+
185
+}

+ 46
- 0
distributed_social_network_analyzer/src/main/scala/Tests/GraphMPCTests.scala Zobrazit soubor

@@ -0,0 +1,46 @@
1
+//SparkContext
2
+import org.apache.spark.sql.SparkSession
3
+import org.apache.spark.sql.DataFrame
4
+import scala.collection.mutable.ListBuffer
5
+import org.apache.spark.SparkContext
6
+import org.apache.spark.SparkConf
7
+
8
+//GraphX TODO: TEST ONLY
9
+import org.apache.spark._
10
+import org.apache.spark.graphx._
11
+import org.apache.spark.rdd.RDD
12
+
13
+//Lists
14
+import scala.collection.mutable.ListBuffer
15
+
16
+//Read files
17
+import scala.io.Source
18
+import sys.process._
19
+import java.io._
20
+
21
+class GraphMPCTests (session: SparkSession){
22
+
23
+  def listTriplets(id: String){
24
+    val downloader = new TwitterDownloader(session.sparkContext)
25
+    val path = downloader.downloadAndStoreConversation(id)
26
+
27
+    //Create MPC
28
+    val parserCaller = new ParserCaller(session)
29
+    val classifier = parserCaller.parseJson(path)
30
+    //Create graph
31
+    val testGraph = new TwitterGraph(session.sparkContext, classifier)
32
+    testGraph.parseConversationFromFile(path)
33
+    val graph = testGraph.getGraph()
34
+    val facts: RDD[String] =
35
+    graph.triplets.map(triplet =>
36
+    triplet.srcAttr.author + " is " + triplet.attr + " with " + triplet.dstAttr.author)
37
+    facts.collect.foreach(println(_))
38
+  }
39
+
40
+  /*
41
+   Some conversations:
42
+   835171254852202496
43
+
44
+  */
45
+
46
+}

+ 100
- 0
distributed_social_network_analyzer/src/main/scala/Tests/SerializationTests.scala Zobrazit soubor

@@ -0,0 +1,100 @@
1
+//SparkContext
2
+import org.apache.spark.sql.SparkSession
3
+import org.apache.spark.SparkContext
4
+import org.apache.spark.SparkConf
5
+
6
+//GraphX TODO: TEST ONLY
7
+import org.apache.spark._
8
+import org.apache.spark.graphx._
9
+import org.apache.spark.rdd.RDD
10
+
11
+//Lists
12
+import scala.collection.mutable.ListBuffer
13
+
14
+//Read files
15
+import scala.io.Source
16
+
17
+class SerializationTests(session: SparkSession){
18
+
19
+  val context = session.sparkContext
20
+
21
+  def graphSerailizingTest(){
22
+    val jGraph = new SocialGraph(context)
23
+    jGraph.addConnection(1, 2, "noone")
24
+    jGraph.addConnection(1, 3, "noone")
25
+    val someMap : Map[String, Any] = Map("Something" -> "Content")
26
+    jGraph.addMessage(1, "author1", "content1", someMap)
27
+    jGraph.addMessage(2, "author2", "content2", someMap)
28
+    jGraph.addMessage(3, "author3", "content3", someMap)
29
+
30
+    jGraph.makeGraph()
31
+
32
+    val graph = jGraph.getGraph()
33
+
34
+    val contentCount = graph.vertices.filter{
35
+      case(id, message) => message.message == "content1"
36
+    }.count
37
+
38
+    val relationToRootCount = graph.edges.filter(e => e.srcId < e.dstId).count
39
+
40
+
41
+    println("Total messages with content 1: " + contentCount + "\n" +
42
+      "Total edges going to higher node id: " + relationToRootCount)
43
+    jGraph.storeGraph("Storage/Graphs/", "Test1")
44
+
45
+    val lGraph = new SocialGraph(context)
46
+    lGraph.loadGraph("Storage/Graphs/", "Test1")
47
+    lGraph.makeGraph()
48
+
49
+    val loaded_graph = lGraph.getGraph()
50
+
51
+    val loaded_contentCount = loaded_graph.vertices.filter{
52
+      case(id, message) => message.message == "content1"
53
+    }.count
54
+
55
+    val loaded_relationToRootCount = loaded_graph.edges.filter(e => e.srcId < e.dstId).count
56
+
57
+
58
+    println("[LOADED GRAPH]Total messages with content 1: " + loaded_contentCount + "\n" +
59
+      "[LOADED GRAPH]Total edges going to higher node id: " + loaded_relationToRootCount)
60
+  }
61
+
62
+  def twitterStoringTest(){
63
+    println("[SERIALIZATION_TEST]: Storing Twitter graph")
64
+    val testGraph : TwitterGraph = getMultiReferenceConversationGraph()
65
+    println("[SERIALIZATION_TEST]: Twitter graph downloaded, storging...")
66
+    testGraph.storeGraph("Storage/Graphs/", "TwitterTest")
67
+    println("[SERIALIZATION_TEST]: stored, loading from storage")
68
+    val lGraph = new SocialGraph(context)
69
+    lGraph.loadGraph("Storage/Graphs/", "TwitterTest")
70
+    lGraph.makeGraph()
71
+
72
+    println("Max distance of the graph is " + testGraph.maxDistance()
73
+         +"\nMin distance of the graph is " + testGraph.minDistance()
74
+         +"\nAverage distance is " + testGraph.averageDistance())
75
+   println("Max distance of the LOADED graph is " + lGraph.maxDistance()
76
+        +"\nMin distance of the LOADED graph is " + lGraph.minDistance()
77
+        +"\nAverage distance LOADED is " + lGraph.averageDistance())
78
+  }
79
+
80
+  private def getMultiReferenceConversationGraph(): TwitterGraph = {
81
+    storedTestGraph match {
82
+      case Some(i) => i
83
+      case None =>  {
84
+        val downloader = new TwitterDownloader(session.sparkContext)
85
+        val path = downloader.downloadAndStoreConversation("574324656905281538")
86
+        //Create MPC
87
+        val parserCaller = new ParserCaller(session)
88
+        val classifier = parserCaller.parseJson(path)
89
+        //Create graph
90
+        val testGraph = new TwitterGraph(session.sparkContext, classifier)
91
+        testGraph.parseConversationFromFile(path)
92
+        storedTestGraph = Some(testGraph)
93
+        testGraph
94
+      }
95
+    }
96
+  }
97
+
98
+  var storedTestGraph : Option[TwitterGraph] = None
99
+
100
+}

+ 480
- 0
smallexamples/Deguindos-c5-logweights.xml Zobrazit soubor

@@ -0,0 +1,480 @@
1
+<entailment-corpus>
2
+  <argument-list maxweight="5" minweight="3">
3
+    <arg emoticon="none" hashlist="[]" id="590480494636179456" lang="es" numhashs="0" numurls="0" sentiment="0" topic="conv-c5-Deguindos" urllist="[]" weight="5">@Ilias_a @Marxista_esp en 2014, 24% y en 2015 23%</arg>
4
+    <arg emoticon="none" hashlist="[]" id="590480675385507840" lang="es" numhashs="0" numurls="0" sentiment="0" topic="conv-c5-Deguindos" urllist="[]" weight="3">@juanrallo y estamos en 2016? @Marxista_esp</arg>
5
+    <arg emoticon="none" hashlist="[]" id="590480949516840960" lang="es" numhashs="0" numurls="0" sentiment="0" topic="conv-c5-Deguindos" urllist="[]" weight="5">@Ilias_a @Marxista_esp s&#237; hemos estado en 2014 (24%) y s&#237; estamos en 2015 (23%)</arg>
6
+    <arg emoticon="none" hashlist="[]" id="590484317673037824" lang="es" numhashs="0" numurls="0" sentiment="0" topic="conv-c5-Deguindos" urllist="[]" weight="3">@juanrallo si, pero si se aplica a ejercicio 2015 se paga en 2016 @Marxista_esp</arg>
7
+    <arg emoticon="none" hashlist="[]" id="590484586448211968" lang="es" numhashs="0" numurls="0" sentiment="0" topic="conv-c5-Deguindos" urllist="[]" weight="5">@Ilias_a @Marxista_esp Pero ya se est&#225; devengando.</arg>
8
+    <arg emoticon="none" hashlist="[]" id="590484707697172480" lang="es" numhashs="0" numurls="0" sentiment="0" topic="conv-c5-Deguindos" urllist="[]" weight="3">@juanrallo si, 4 meses. Eso explica la historia impositiva de Dinamarca @Marxista_esp</arg>
9
+    <arg emoticon="none" hashlist="[]" id="590484974027075584" lang="es" numhashs="0" numurls="0" sentiment="0" topic="conv-c5-Deguindos" urllist="[]" weight="5">@Ilias_a @Marxista_esp Explica que se maltrata menos a la empresas que aqu&#237;.</arg>
10
+    <arg emoticon="none" hashlist="[]" id="590485435106906112" lang="es" numhashs="0" numurls="0" sentiment="0" topic="conv-c5-Deguindos" urllist="[]" weight="3">@juanrallo el tipo reducido en Espa&#241;a es un 25% y tb se va a bajar @Marxista_esp</arg>
11
+    <arg emoticon="none" hashlist="[]" id="590485945889251328" lang="es" numhashs="0" numurls="0" sentiment="-1" topic="conv-c5-Deguindos" urllist="[]" weight="5">@Ilias_a @Marxista_esp En Dinamarca el no reducido es el 24%</arg>
12
+    <arg emoticon="none" hashlist="[]" id="590486764147576832" lang="es" numhashs="0" numurls="0" sentiment="0" topic="conv-c5-Deguindos" urllist="[]" weight="3">@juanrallo sabe usted que en Espa&#241;a se puede pagar un 10% de sociedades sin contar deducciones? @Marxista_esp</arg>
13
+    <arg emoticon="none" hashlist="[]" id="590487344848961536" lang="es" numhashs="0" numurls="0" sentiment="0" topic="conv-c5-Deguindos" urllist="[]" weight="5">@Ilias_a @juanrallo @Marxista_esp e incluso un 5% afirmado por el mismisimo DeGuindos! :O</arg>
14
+    <arg emoticon="none" hashlist="[]" id="590487439002710016" lang="es" numhashs="0" numurls="0" sentiment="0" topic="conv-c5-Deguindos" urllist="[]" weight="5">@TrabajarEuropa @Ilias_a @Marxista_esp Qu&#233; fe tienen en mentirosos profesionales!</arg>
15
+    <arg emoticon="positive" emoticon-positive-counter="1" hashlist="[]" id="590487744603893761" lang="es" numhashs="0" numurls="0" sentiment="0" topic="conv-c5-Deguindos" urllist="[]" weight="5">@juanrallo Si no te crees a un Ministro ... Apaga y vamonos :) habra que hablar con un Experto Fiscaliza @Remo_ ;)  @Ilias_a @Marxista_esp</arg>
16
+    <arg emoticon="none" hashlist="[]" id="590488016088657921" lang="es" numhashs="0" numurls="0" sentiment="0" topic="conv-c5-Deguindos" urllist="[]" weight="5">@TrabajarEuropa @Remo_ @Ilias_a @Marxista_esp Jaja. Tambi&#233;n te cre&#237;as a Rato supongo.</arg>
17
+    <arg emoticon="none" hashlist="[]" id="590488286591848448" lang="es" numhashs="0" numurls="0" sentiment="-1" topic="conv-c5-Deguindos" urllist="[]" weight="5">@juanrallo cuando daba datos fiscales del Gobierno no es para creerlo? @Remo_ @Ilias_a @Marxista_esp</arg>
18
+    <arg emoticon="none" hashlist="[]" id="590489251113992192" lang="es" numhashs="0" numurls="0" sentiment="0" topic="conv-c5-Deguindos" urllist="[]" weight="5">@TrabajarEuropa @Remo_ @Ilias_a @Marxista_esp El gobiernos tiene inter&#233;s en maximizar los impuestos. Es parte interesada.</arg>
19
+    <arg emoticon="none" hashlist="[]" id="590489338514907136" lang="en" numhashs="0" numurls="1" sentiment="0" topic="conv-c5-Deguindos" urllist="[u'http://ec.europa.eu/eurostat/tgm/table.do?tab=table&amp;init=1&amp;language=en&amp;pcode=tec00020&amp;plugin=1']" weight="3">@TrabajarEuropa @juanrallo Taxes on production and imports SPA 11% DEN 16% http://t.co/aJ8a1BdIYL @Remo_ @Marxista_esp @TrabajarEuropa</arg>
20
+    <arg emoticon="none" hashlist="[]" id="590489516974153728" lang="es" numhashs="0" numurls="0" sentiment="-2" topic="conv-c5-Deguindos" urllist="[]" weight="3">@juanrallo no, si detr&#225;s del gobierno hay organizaciones privadas haciendo lobby  @TrabajarEuropa @Remo_ @Marxista_esp</arg>
21
+    <arg emoticon="none" hashlist="[]" id="590489610930761728" lang="es" numhashs="0" numurls="0" sentiment="0" topic="conv-c5-Deguindos" urllist="[]" weight="5">@Ilias_a @TrabajarEuropa @Remo_ @Marxista_esp Jaja, eso son los impuestos indirectos tipo IVA. En fin, qu&#233; nivel.</arg>
22
+    <arg emoticon="none" hashlist="[]" id="590490256740392960" lang="es" numhashs="0" numurls="1" sentiment="-1" topic="conv-c5-Deguindos" urllist="[u'http://ec.europa.eu/eurostat/tgm/web/table/description.jsp']" weight="3">@juanrallo perd&#243;n pero en la definici&#243;n no dice eso http://t.co/hSjHgg5x9w @TrabajarEuropa @Remo_ @Marxista_esp</arg>
23
+    <arg emoticon="none" hashlist="[]" id="590490593370988545" lang="es" numhashs="0" numurls="1" sentiment="-1" topic="conv-c5-Deguindos" urllist="[u'https://stats.oecd.org/glossary/detail.asp?ID=2670']" weight="5">@Ilias_a @TrabajarEuropa @Remo_ @Marxista_esp Impuestos sobre bienes cuando son vendidos? https://t.co/JJ6dvxbiyD no sabe qu&#233; es el IVA?</arg>
24
+    <arg emoticon="none" hashlist="[]" id="590490652137357312" lang="en" numhashs="0" numurls="0" sentiment="2" topic="conv-c5-Deguindos" urllist="[]" weight="3">@juanrallo employment of labour, the ownership or use of land, buildings or other assets used in production @TrabajarEuropa @Marxista_esp</arg>
25
+    <arg emoticon="none" hashlist="[]" id="590490781351288832" lang="es" numhashs="0" numurls="0" sentiment="-1" topic="conv-c5-Deguindos" urllist="[]" weight="3">@juanrallo IVA es un impuesto al consumo, no a la producci&#243;n @TrabajarEuropa @Remo_ @Marxista_esp</arg>
26
+    <arg emoticon="none" hashlist="[]" id="590490886334771200" lang="es" numhashs="0" numurls="0" sentiment="-1" topic="conv-c5-Deguindos" urllist="[]" weight="3">@juanrallo y no creo q el impuesto al trabajo, o al uso de tierra sea IVA @TrabajarEuropa @Remo_ @Marxista_esp</arg>
27
+    <arg emoticon="none" hashlist="[]" id="590491046469095424" lang="es" numhashs="0" numurls="0" sentiment="0" topic="conv-c5-Deguindos" urllist="[]" weight="5">@Ilias_a @TrabajarEuropa @Marxista_esp el impuesto de Sociedades es un impuesto sobre el trabajo?</arg>
28
+    <arg emoticon="none" hashlist="[]" id="590491264816144384" lang="es" numhashs="0" numurls="0" sentiment="0" topic="conv-c5-Deguindos" urllist="[]" weight="5">@Ilias_a @TrabajarEuropa @Remo_ @Marxista_esp Es sobre el Valor A&#241;adido. El Valor A&#241;adido de producci&#243;n. En fin. Nivel.</arg>
29
+    <arg emoticon="none" hashlist="[]" id="590491281186562048" lang="es" numhashs="0" numurls="0" sentiment="0" topic="conv-c5-Deguindos" urllist="[]" weight="3">@juanrallo le remito a la parte inicial del debate donde usted dec&#237;a q Dinamarca s&#243;lo paga IRPF e IVA @TrabajarEuropa @Marxista_esp</arg>
30
+    <arg emoticon="none" hashlist="[]" id="590491359095685122" lang="es" numhashs="0" numurls="0" sentiment="-1" topic="conv-c5-Deguindos" urllist="[]" weight="5">@Ilias_a @TrabajarEuropa @Remo_ @Marxista_esp Lo que desde luego no es, es Sociedades.</arg>
31
+    <arg emoticon="none" hashlist="[]" id="590491426976354304" lang="es" numhashs="0" numurls="0" sentiment="-1" topic="conv-c5-Deguindos" urllist="[]" weight="3">@juanrallo no al trabajo, pero si un impuesto sobre la capacidad productiva @TrabajarEuropa @Marxista_esp</arg>
32
+    <arg emoticon="none" hashlist="[]" id="590491512514969600" lang="es" numhashs="0" numurls="0" sentiment="0" topic="conv-c5-Deguindos" urllist="[]" weight="5">@Ilias_a @TrabajarEuropa @Marxista_esp Ah bien, que ahora tratamos de escurrir el bulto.</arg>
33
+    <arg emoticon="none" hashlist="[]" id="590491561814839296" lang="es" numhashs="0" numurls="0" sentiment="-1" topic="conv-c5-Deguindos" urllist="[]" weight="3">@juanrallo si, pero lo paga el consumidor. No es un impuesto a la producci&#243;n @TrabajarEuropa @Remo_ @Marxista_esp</arg>
34
+    <arg emoticon="none" hashlist="[]" id="590491619222233088" lang="es" numhashs="0" numurls="0" sentiment="0" topic="conv-c5-Deguindos" urllist="[]" weight="4">@TrabajarEuropa @juanrallo @Ilias_a @Marxista_esp un 10% sin deducciones ni ajustes? Me lo expliquen</arg>
35
+    <arg emoticon="none" hashlist="[]" id="590491664621432833" lang="es" numhashs="0" numurls="0" sentiment="0" topic="conv-c5-Deguindos" urllist="[]" weight="3">@juanrallo el IVA es un impuesto al consumo,@TrabajarEuropa @Remo_ @Marxista_esp</arg>
36
+    <arg emoticon="none" hashlist="[]" id="590491690491850752" lang="es" numhashs="0" numurls="0" sentiment="0" topic="conv-c5-Deguindos" urllist="[]" weight="5">@Ilias_a @TrabajarEuropa @Marxista_esp Falso. Es un impuesto sobre beneficios. Sobre la RENTA neta empresarial.</arg>
37
+    <arg emoticon="none" hashlist="[]" id="590491854099128320" lang="es" numhashs="0" numurls="0" sentiment="-1" topic="conv-c5-Deguindos" urllist="[]" weight="3">@juanrallo Dinamarca no es Canarias ? eso lo habr&#225; hecho usted con los del 1pp  entre DIN y ESP @TrabajarEuropa @Marxista_esp</arg>
38
+    <arg emoticon="none" hashlist="[]" id="590491950526156800" lang="es" numhashs="0" numurls="0" sentiment="0" topic="conv-c5-Deguindos" urllist="[]" weight="5">@Ilias_a @TrabajarEuropa @Remo_ @Marxista_esp Jaja. Qu&#233; tendr&#225; que ver quien sea el obligado tributario con cu&#225;l sea la base imponible?</arg>
39
+    <arg emoticon="none" hashlist="[]" id="590492057027903489" lang="es" numhashs="0" numurls="0" sentiment="0" topic="conv-c5-Deguindos" urllist="[]" weight="5">@Ilias_a @TrabajarEuropa @Remo_ @Marxista_esp un impuesto sobre la producci&#243;n que paga el consumidor.</arg>
40
+    <arg emoticon="positive" emoticon-positive-counter="1" hashlist="[]" id="590492219255169024" lang="es" medialist="[u'http://twitter.com/TrabajarEuropa/status/590492219255169024/photo/1']" numhashs="0" nummedia="1" numurls="0" sentiment="1" topic="conv-c5-Deguindos" urllist="[]" weight="5">@Ilias_a @juanrallo @Marxista_esp en fin... Seria este grafico... Supongo :) http://t.co/g3kLWKqx3k</arg>
41
+    <arg emoticon="none" hashlist="[]" id="590492230248435712" lang="es" numhashs="0" numurls="1" sentiment="0" topic="conv-c5-Deguindos" urllist="[u'http://trueeconomics.blogspot.com.es/2011/06/20062011-europes-corporate-tax-rates.html?m=1']" weight="5">@Ilias_a @TrabajarEuropa @Marxista_esp Much&#237;simo m&#225;s en tipos efectivos: http://t.co/911L3fbTM1</arg>
42
+    <arg emoticon="none" hashlist="[]" id="590492347827429376" lang="es" numhashs="0" numurls="0" sentiment="-2" topic="conv-c5-Deguindos" urllist="[]" weight="3">@juanrallo no , no es as&#237; @TrabajarEuropa @Remo_ @Marxista_esp</arg>
43
+    <arg emoticon="positive" emoticon-positive-counter="1" hashlist="[]" id="590492417540939776" lang="es" numhashs="0" numurls="0" sentiment="1" topic="conv-c5-Deguindos" urllist="[]" weight="5">@TrabajarEuropa @Ilias_a @Marxista_esp y d&#243;nde sale Dinamarca en ese gr&#225;fico? :)</arg>
44
+    <arg emoticon="none" hashlist="[]" id="590492470498177024" lang="es" numhashs="0" numurls="0" sentiment="-1" topic="conv-c5-Deguindos" urllist="[]" weight="3">@juanrallo quien paga el IVA no es la empresa, es el consumidor @TrabajarEuropa @Remo_ @Marxista_esp</arg>
45
+    <arg emoticon="none" hashlist="[]" id="590492660193976320" lang="es" numhashs="0" numurls="0" sentiment="0" topic="conv-c5-Deguindos" urllist="[]" weight="5">@juanrallo jeje es que ahi dice que en ESP es del 40!!! Jajaja imagina... :-P @Ilias_a @Marxista_esp</arg>
46
+    <arg emoticon="none" hashlist="[]" id="590492672751771649" lang="es" numhashs="0" numurls="0" sentiment="0" topic="conv-c5-Deguindos" urllist="[]" weight="5">@Ilias_a @TrabajarEuropa @Remo_ @Marxista_esp qu&#233; es el valor a&#241;adido?</arg>
47
+    <arg emoticon="none" hashlist="[]" id="590492798199160832" lang="es" numhashs="0" numurls="0" sentiment="0" topic="conv-c5-Deguindos" urllist="[]" weight="5">@Ilias_a @TrabajarEuropa @Remo_ @Marxista_esp jaja. Y d&#243;nde dice que los impuestos sobre la producci&#243;n los pague la empresa?Se est&#225; luciendo</arg>
48
+    <arg emoticon="none" hashlist="[]" id="590492915761356801" lang="es" numhashs="0" numurls="0" sentiment="-1" topic="conv-c5-Deguindos" urllist="[]" weight="5">@TrabajarEuropa @Ilias_a @Marxista_esp Ah. Ah&#237; ya no se cree a Guindos? Qu&#233; cosas.</arg>
49
+    <arg emoticon="none" hashlist="[]" id="590493032157421569" lang="es" numhashs="0" numurls="0" sentiment="0" topic="conv-c5-Deguindos" urllist="[]" weight="3">@juanrallo jejeje, pero los costes laborales y los impuestos s/ la tierra a producir suele ser que si @TrabajarEuropa @Remo_ @Marxista_esp</arg>
50
+    <arg emoticon="none" hashlist="[]" id="590493253461475328" lang="es" numhashs="0" numurls="0" sentiment="-1" topic="conv-c5-Deguindos" urllist="[]" weight="5">@Ilias_a @TrabajarEuropa @Remo_ @Marxista_esp O no. El IRPF es un impuesto sobre las rentas salariales y no lo paga la empresa.</arg>
51
+    <arg emoticon="none" hashlist="[]" id="590493330997452800" lang="es" numhashs="0" numurls="0" sentiment="-3" topic="conv-c5-Deguindos" urllist="[]" weight="5">@juanrallo no es que no me lo crea... Es que han metido ahi todo tipo de cosas que no son SOLO el tipo sobre Bcios @Ilias_a @Marxista_esp</arg>
52
+    <arg emoticon="none" hashlist="[]" id="590493498979328000" lang="es" numhashs="0" numurls="0" sentiment="-1" topic="conv-c5-Deguindos" urllist="[]" weight="5">@TrabajarEuropa @Ilias_a @Marxista_esp y si no le gusta ese gr&#225;fico, para qu&#233; me lo env&#237;a? Ruido?</arg>
53
+    <arg emoticon="none" hashlist="[]" id="590493617128665089" lang="es" numhashs="0" numurls="0" sentiment="0" topic="conv-c5-Deguindos" urllist="[]" weight="5">@juanrallo en todo caso que cosas... Somos el 11 con tasas totales MAS BAJAS! :-P jajaja @Ilias_a @Marxista_esp</arg>
54
+    <arg emoticon="positive" emoticon-positive-counter="1" hashlist="[]" id="590493729057812480" lang="es" numhashs="0" numurls="0" sentiment="1" topic="conv-c5-Deguindos" urllist="[]" weight="5">@juanrallo pues porque me confundi! :)</arg>
55
+    <arg emoticon="none" hashlist="[]" id="590493831356882945" lang="es" numhashs="0" numurls="0" sentiment="0" topic="conv-c5-Deguindos" urllist="[]" weight="5">@TrabajarEuropa @Ilias_a @Marxista_esp tasas? Mezcla impuestos con tasas? Y discut&#237;amos Espa&#241;a vs. Dinamarca.</arg>
56
+    <arg emoticon="none" hashlist="[]" id="590494029277716480" lang="es" numhashs="0" numurls="0" sentiment="0" topic="conv-c5-Deguindos" urllist="[]" weight="5">@TrabajarEuropa Ah ya, que lo envi&#243; antes de fijarse d&#243;nde estaba Dinamarca.</arg>
57
+    <arg emoticon="none" hashlist="[u'EU']" id="590495047709229056" lang="es" numhashs="1" numurls="1" sentiment="0" topic="conv-c5-Deguindos" urllist="[u'http://ec.europa.eu/taxation_customs/resources/documents/common/publications/studies/final_report_2014_taxud_2013_cc_120.pdf']" weight="5">@juanrallo @Ilias_a @Marxista_esp bueno... Si quereis ver tipo EFECTIVO en #EU bucear por las mas de 3.00 paginas de http://t.co/wtvlxhNIOu</arg>
58
+    <arg emoticon="none" hashlist="[]" id="590496429631746048" lang="es" medialist="[u'http://twitter.com/TrabajarEuropa/status/590496429631746048/photo/1']" numhashs="0" nummedia="1" numurls="1" sentiment="-1" topic="conv-c5-Deguindos" urllist="[u'http://ec.europa.eu/transparency/regexpert/index.cfm?do=groupDetail.groupDetailDoc&amp;id=11838&amp;no=3']" weight="5">@juanrallo @Ilias_a @Marxista_esp lo unico que he encontrado es este graf. http://t.co/l4W2pK8mAJ pero no esta Denn http://t.co/Qeq2ZZbVni</arg>
59
+    <arg emoticon="none" hashlist="[]" id="590497227090571264" lang="es" medialist="[u'http://twitter.com/Ilias_a/status/590497227090571264/photo/1']" numhashs="0" nummedia="1" numurls="0" sentiment="0" topic="conv-c5-Deguindos" urllist="[]" weight="3">@juanrallo mire esta tabla, OCDE imp s/ b&#186; de las corporaciones en % del PIB DIN 3,3% @TrabajarEuropa @Marxista_esp http://t.co/Auhalax6Ey</arg>
60
+    <arg emoticon="none" hashlist="[]" id="590497405122048000" lang="es" medialist="[u'http://twitter.com/Ilias_a/status/590497405122048000/photo/1']" numhashs="0" nummedia="1" numurls="0" sentiment="0" topic="conv-c5-Deguindos" urllist="[]" weight="3">@juanrallo y ahora la misma tabla pero ESP= 2% seguimos con lo de jaja, jeje, jiji ?@TrabajarEuropa @Marxista_esp http://t.co/t6lSecAJIK</arg>
61
+    <arg emoticon="none" hashlist="[]" id="590497633510289408" lang="es" numhashs="0" numurls="0" sentiment="-1" topic="conv-c5-Deguindos" urllist="[]" weight="5">@Ilias_a pero te dira que eso depende del PIb... Que el quiere el tipo "oficial" jaja 
62
+
63
+No hay ningun oriundo de DK que nos pueda informar?</arg>
64
+    <arg emoticon="none" hashlist="[]" id="590498344830644225" lang="es" numhashs="0" numurls="0" sentiment="-1" topic="conv-c5-Deguindos" urllist="[]" weight="3">@TrabajarEuropa es un soci&#243;pata narcisista.... ese es el problema. Lo de Dinamarca no es Canarias fue el colmo</arg>
65
+    <arg emoticon="none" hashlist="[]" id="590498484077338624" lang="es" numhashs="0" numurls="0" sentiment="-1" topic="conv-c5-Deguindos" urllist="[]" weight="3">@TrabajarEuropa y fue el &#250;nico argumento para no responder ante lo del RIC</arg>
66
+    <arg emoticon="none" hashlist="[]" id="590498573499944960" lang="es" numhashs="0" numurls="0" sentiment="0" topic="conv-c5-Deguindos" urllist="[]" weight="3">@TrabajarEuropa porque en Canarias las empresas q se acogen a la RIC s&#243;lo pagan un 10% de sociedades</arg>
67
+    <arg emoticon="none" hashlist="[]" id="590501865936318464" lang="es" numhashs="0" numurls="0" sentiment="0" topic="conv-c5-Deguindos" urllist="[]" weight="3">@Remo_ pues con la RIC , pero tienes q estar en Canarias @TrabajarEuropa @juanrallo @Marxista_esp</arg>
68
+    <arg emoticon="none" hashlist="[]" id="590502238566670337" lang="es" numhashs="0" numurls="1" sentiment="0" topic="conv-c5-Deguindos" urllist="[u'http://www.emprenderencanarias.es/ref-intro/reserva-inversiones/']" weight="3">@Remo_  aqu&#237; est&#225; http://t.co/XUtP3k31I7 @TrabajarEuropa @juanrallo @Marxista_esp</arg>
69
+    <arg emoticon="none" hashlist="[]" id="590502313799868416" lang="es" numhashs="0" numurls="0" sentiment="2" topic="conv-c5-Deguindos" urllist="[]" weight="3">@Remo_ y ha ido como el culo @TrabajarEuropa @juanrallo @Marxista_esp</arg>
70
+    <arg emoticon="none" hashlist="[]" id="590502788964216832" lang="es" numhashs="0" numurls="0" sentiment="0" topic="conv-c5-Deguindos" urllist="[]" weight="3">@Remo_ mira la enorme fortaleza de la econom&#237;a canaria desde q se aplica la RIC (iron&#237;a) @TrabajarEuropa @juanrallo @Marxista_esp</arg>
71
+    <arg emoticon="none" hashlist="[]" id="590502934393315328" lang="es" numhashs="0" numurls="0" sentiment="0" topic="conv-c5-Deguindos" urllist="[]" weight="3">@Remo_ nuestras multinacionales dominan el mundo... (iron&#237;a) @TrabajarEuropa @juanrallo @Marxista_esp</arg>
72
+    <arg emoticon="positive" emoticon-positive-counter="1" hashlist="[]" id="590505662846791680" lang="es" numhashs="0" numurls="1" sentiment="1" topic="conv-c5-Deguindos" urllist="[u'http://elblogdedaniel.com/2015/02/14/mito-1-las-grandes-empresas-solo-tributan-un-3/']" weight="3">@TrabajarEuropa @Ilias_a @juanrallo @Marxista_esp si te interesa,escribo sobre el mito del "3%" :) http://t.co/h2doz5GksA</arg>
73
+    <arg emoticon="positive" emoticon-positive-counter="1" hashlist="[]" id="590507061496123392" lang="es" numhashs="0" numurls="0" sentiment="1" topic="conv-c5-Deguindos" urllist="[]" weight="5">@Daniel_Marbella gracias! Me lo apunto! A ver si indica que pagan el 25% :)  @Ilias_a @juanrallo @Marxista_esp</arg>
74
+    <arg emoticon="none" hashlist="[]" id="590508470773936128" lang="es" numhashs="0" numurls="0" sentiment="0" topic="conv-c5-Deguindos" urllist="[]" weight="3">@TrabajarEuropa que bueno, el art&#237;culo se lo pasa La Calle...  @Daniel_Marbella @juanrallo @Marxista_esp</arg>
75
+    <arg emoticon="none" hashlist="[]" id="590512258591105024" lang="es" numhashs="0" numurls="0" sentiment="0" topic="conv-c5-Deguindos" urllist="[]" weight="5">@Ilias_a @Daniel_Marbella huis.. El LaCalle que me tiene bloqueao?? :-P jajaja</arg>
76
+    <arg emoticon="none" hashlist="[]" id="590516047951835136" lang="es" numhashs="0" numurls="0" sentiment="0" topic="conv-c5-Deguindos" urllist="[]" weight="3">@TrabajarEuropa atenci&#243;n al siguiente zasca.....</arg>
77
+    <arg emoticon="none" hashlist="[]" id="590517879948369920" lang="es" numhashs="0" numurls="0" sentiment="-1" topic="conv-c5-Deguindos" urllist="[]" weight="3">@juanrallo tiene q ver mucho porque no se tiene q vender donde se produce  @TrabajarEuropa @Remo_ @Marxista_esp</arg>
78
+    <arg emoticon="positive" emoticon-positive-counter="1" hashlist="[]" id="590518373307572225" lang="es" numhashs="0" numurls="0" sentiment="1" topic="conv-c5-Deguindos" urllist="[]" weight="3">@TrabajarEuropa @Ilias_a @juanrallo @Marxista_esp gracias por apuntartelo :)</arg>
79
+    <arg emoticon="none" hashlist="[]" id="590538494688231425" lang="es" numhashs="0" numurls="0" sentiment="0" topic="conv-c5-Deguindos" urllist="[]" weight="4">@Ilias_a trampas al solitario las justas, que en Melilla tambi&#233;n se pagan muy pocos impuestos @TrabajarEuropa @juanrallo @Marxista_esp</arg>
80
+    <arg emoticon="none" hashlist="[]" id="590539899171897345" lang="es" numhashs="0" numurls="0" sentiment="-1" topic="conv-c5-Deguindos" urllist="[]" weight="3">@Remo_ si, pero Canarias son 2 millones de habitantes y no veo donde hay trampa @TrabajarEuropa @juanrallo @Marxista_esp</arg>
81
+    <arg emoticon="none" hashlist="[]" id="590540065576787971" lang="es" numhashs="0" numurls="0" sentiment="0" topic="conv-c5-Deguindos" urllist="[]" weight="3">@Remo_ un impuesto de sociedades del 10% en territorio espa&#241;ol , que trampa? @TrabajarEuropa @juanrallo @Marxista_esp</arg>
82
+    <arg emoticon="none" hashlist="[]" id="590540394649276417" lang="es" numhashs="0" numurls="0" sentiment="0" topic="conv-c5-Deguindos" urllist="[]" weight="3">@Remo_ y posiblemente Dinamarca con poblaci&#243;n d 5 millones sea + comparable a a 2 millones q a 45 @TrabajarEuropa @juanrallo @Marxista_esp</arg>
83
+    <arg emoticon="none" hashlist="[]" id="590540582637989889" lang="es" numhashs="0" numurls="0" sentiment="0" topic="conv-c5-Deguindos" urllist="[]" weight="3">@Remo_ y cualquier empresa del territorio espa&#241;ol puede colocar su sede fiscal donde quiera @TrabajarEuropa @juanrallo @Marxista_esp</arg>
84
+    <arg emoticon="none" hashlist="[]" id="590543705792208896" lang="es" numhashs="0" numurls="0" sentiment="-1" topic="conv-c5-Deguindos" urllist="[]" weight="4">@Ilias_a falso, el domicilio fiscal tiene unas reglas muy claras y no se puede poner donde quieras @TrabajarEuropa @juanrallo @Marxista_esp</arg>
85
+    <arg emoticon="none" hashlist="[]" id="590543815460704258" lang="es" numhashs="0" numurls="0" sentiment="0" topic="conv-c5-Deguindos" urllist="[]" weight="4">@Ilias_a digo donde quieras si tu centro efectivo o mayor inmovilizado est&#225; fuera de Canarias @TrabajarEuropa @juanrallo @Marxista_esp</arg>
86
+    <arg emoticon="none" hashlist="[]" id="590544220643061760" lang="es" numhashs="0" numurls="0" sentiment="0" topic="conv-c5-Deguindos" urllist="[]" weight="3">@Remo_ si, pero puedes poner una empresa v&#237;nculo y expatriar beneficios @TrabajarEuropa @juanrallo @Marxista_esp</arg>
87
+    <arg emoticon="none" hashlist="[]" id="590544581214744576" lang="es" numhashs="0" numurls="0" sentiment="0" topic="conv-c5-Deguindos" urllist="[]" weight="3">@Remo_ como operan las grandes con los para&#237;sos fiscales o Irlanda @TrabajarEuropa @juanrallo @Marxista_esp</arg>
88
+    <arg emoticon="none" hashlist="[]" id="590544652060733441" lang="es" numhashs="0" numurls="0" sentiment="-1" topic="conv-c5-Deguindos" urllist="[]" weight="3">@Remo_ por tanto no es falso @TrabajarEuropa @juanrallo @Marxista_esp</arg>
89
+    <arg emoticon="none" hashlist="[]" id="590544955967463427" lang="es" numhashs="0" numurls="0" sentiment="0" topic="conv-c5-Deguindos" urllist="[]" weight="4">@Ilias_a tampoco, aplica el art&#237;culo 18 de la LIS para operaciones vinculadas. @TrabajarEuropa @juanrallo @Marxista_esp</arg>
90
+    <arg emoticon="none" hashlist="[]" id="590546008968396800" lang="es" numhashs="0" numurls="0" sentiment="0" topic="conv-c5-Deguindos" urllist="[]" weight="3">@Remo_ en ese art&#237;culo se habla del valor contable y valor de mercado?? que tiene que ver? 
91
+@TrabajarEuropa @juanrallo @Marxista_esp</arg>
92
+    <arg emoticon="none" hashlist="[]" id="590546999075856384" lang="es" numhashs="0" numurls="0" sentiment="-2" topic="conv-c5-Deguindos" urllist="[]" weight="3">@Remo_ estar&#225;s refiriendote al art&#237;culo 8, y en el no dice q no se pueda dentro de Espa&#241;a @TrabajarEuropa @juanrallo @Marxista_esp</arg>
93
+    <arg emoticon="none" hashlist="[]" id="590547200545058816" lang="es" numhashs="0" numurls="0" sentiment="0" topic="conv-c5-Deguindos" urllist="[]" weight="3">@Remo_ y que yo sepa, de momento Canarias est&#225; dentro de Espa&#241;a @TrabajarEuropa @juanrallo @Marxista_esp</arg>
94
+    <arg emoticon="none" hashlist="[]" id="590547374365376512" lang="es" numhashs="0" numurls="0" sentiment="0" topic="conv-c5-Deguindos" urllist="[]" weight="3">@Remo_ lo que tu dices vale para operar desde un paraiso fiscal fuera de territorio espa&#241;ol @TrabajarEuropa @juanrallo @Marxista_esp</arg>
95
+    <arg emoticon="none" hashlist="[]" id="590556202477707265" lang="es" numhashs="0" numurls="1" sentiment="0" topic="conv-c5-Deguindos" urllist="[u'http://noticias.juridicas.com/base_datos/Fiscal/540273-l-27-2014-de-27-nov-impuesto-sobre-sociedades.html#a18']" weight="4">@Ilias_a no, me refiero a esto http://t.co/jnL3KyeRGW y la filial Canaria que planteas @TrabajarEuropa @juanrallo @Marxista_esp</arg>
96
+    <arg emoticon="none" hashlist="[]" id="590571044727054338" lang="es" numhashs="0" numurls="0" sentiment="0" topic="conv-c5-Deguindos" urllist="[]" weight="3">@Remo_ si tienes una filial en Canarias, puedes introducir ingresos de varias formas @TrabajarEuropa @juanrallo @Marxista_esp</arg>
97
+    <arg emoticon="none" hashlist="[]" id="590571312965427200" lang="es" numhashs="0" numurls="0" sentiment="0" topic="conv-c5-Deguindos" urllist="[]" weight="3">@Remo_ como servicios realizados por la filial Canaria @TrabajarEuropa @juanrallo @Marxista_esp</arg>
98
+    <arg emoticon="none" hashlist="[]" id="590571540925784064" lang="es" numhashs="0" numurls="0" sentiment="0" topic="conv-c5-Deguindos" urllist="[]" weight="3">@Remo_ a ver, hay empresas q ya lo hacen @TrabajarEuropa @juanrallo @Marxista_esp</arg>
99
+    <arg emoticon="none" hashlist="[]" id="590572057961857024" lang="es" numhashs="0" numurls="0" sentiment="0" topic="conv-c5-Deguindos" urllist="[]" weight="3">@Remo_ de hecho trabaj&#233; en una empresa donde lo hac&#237;amos al rev&#233;s desde Canarias a Barcelona @TrabajarEuropa @juanrallo @Marxista_esp</arg>
100
+    <arg emoticon="none" hashlist="[]" id="590572306466009090" lang="es" numhashs="0" numurls="0" sentiment="0" topic="conv-c5-Deguindos" urllist="[]" weight="3">@Remo_ en que parte del territorio imputas la actividad d una empresa q produce x toda Esp? @TrabajarEuropa @juanrallo @Marxista_esp</arg>
101
+  </argument-list>
102
+  <argument-pairs>
103
+    <pair entailment="ATTACKS" id="0" topic="conv-c5-Deguindos">
104
+      <t id="590480675385507840"/>
105
+      <h id="590480494636179456"/>
106
+    </pair>
107
+    <pair entailment="ATTACKS" id="1" topic="conv-c5-Deguindos">
108
+      <t id="590491359095685122"/>
109
+      <h id="590490886334771200"/>
110
+    </pair>
111
+    <pair entailment="ATTACKS" id="2" topic="conv-c5-Deguindos">
112
+      <t id="590540065576787971"/>
113
+      <h id="590538494688231425"/>
114
+    </pair>
115
+    <pair entailment="ATTACKS" id="3" topic="conv-c5-Deguindos">
116
+      <t id="590487439002710016"/>
117
+      <h id="590487344848961536"/>
118
+    </pair>
119
+    <pair entailment="SUPPORTS" id="4" topic="conv-c5-Deguindos">
120
+      <t id="590487344848961536"/>
121
+      <h id="590486764147576832"/>
122
+    </pair>
123
+    <pair entailment="ATTACKS" id="5" topic="conv-c5-Deguindos">
124
+      <t id="590488016088657921"/>
125
+      <h id="590487744603893761"/>
126
+    </pair>
127
+    <pair entailment="ATTACKS" id="6" topic="conv-c5-Deguindos">
128
+      <t id="590543815460704258"/>
129
+      <h id="590543705792208896"/>
130
+    </pair>
131
+    <pair entailment="ATTACKS" id="7" topic="conv-c5-Deguindos">
132
+      <t id="590544955967463427"/>
133
+      <h id="590544220643061760"/>
134
+    </pair>
135
+    <pair entailment="ATTACKS" id="8" topic="conv-c5-Deguindos">
136
+      <t id="590488286591848448"/>
137
+      <h id="590488016088657921"/>
138
+    </pair>
139
+    <pair entailment="ATTACKS" id="9" topic="conv-c5-Deguindos">
140
+      <t id="590538494688231425"/>
141
+      <h id="590501865936318464"/>
142
+    </pair>
143
+    <pair entailment="ATTACKS" id="10" topic="conv-c5-Deguindos">
144
+      <t id="590497633510289408"/>
145
+      <h id="590497227090571264"/>
146
+    </pair>
147
+    <pair entailment="ATTACKS" id="11" topic="conv-c5-Deguindos">
148
+      <t id="590492347827429376"/>
149
+      <h id="590492057027903489"/>
150
+    </pair>
151
+    <pair entailment="ATTACKS" id="12" topic="conv-c5-Deguindos">
152
+      <t id="590539899171897345"/>
153
+      <h id="590538494688231425"/>
154
+    </pair>
155
+    <pair entailment="ATTACKS" id="13" topic="conv-c5-Deguindos">
156
+      <t id="590497405122048000"/>
157
+      <h id="590492417540939776"/>
158
+    </pair>
159
+    <pair entailment="ATTACKS" id="14" topic="conv-c5-Deguindos">
160
+      <t id="590492660193976320"/>
161
+      <h id="590492417540939776"/>
162
+    </pair>
163
+    <pair entailment="ATTACKS" id="15" topic="conv-c5-Deguindos">
164
+      <t id="590546008968396800"/>
165
+      <h id="590544955967463427"/>
166
+    </pair>
167
+    <pair entailment="ATTACKS" id="16" topic="conv-c5-Deguindos">
168
+      <t id="590495047709229056"/>
169
+      <h id="590493831356882945"/>
170
+    </pair>
171
+    <pair entailment="ATTACKS" id="17" topic="conv-c5-Deguindos">
172
+      <t id="590491561814839296"/>
173
+      <h id="590491264816144384"/>
174
+    </pair>
175
+    <pair entailment="ATTACKS" id="18" topic="conv-c5-Deguindos">
176
+      <t id="590492057027903489"/>
177
+      <h id="590491664621432833"/>
178
+    </pair>
179
+    <pair entailment="SUPPORTS" id="19" topic="conv-c5-Deguindos">
180
+      <t id="590512258591105024"/>
181
+      <h id="590508470773936128"/>
182
+    </pair>
183
+    <pair entailment="SUPPORTS" id="20" topic="conv-c5-Deguindos">
184
+      <t id="590507061496123392"/>
185
+      <h id="590505662846791680"/>
186
+    </pair>
187
+    <pair entailment="ATTACKS" id="21" topic="conv-c5-Deguindos">
188
+      <t id="590492417540939776"/>
189
+      <h id="590492219255169024"/>
190
+    </pair>
191
+    <pair entailment="ATTACKS" id="22" topic="conv-c5-Deguindos">
192
+      <t id="590497227090571264"/>
193
+      <h id="590492417540939776"/>
194
+    </pair>
195
+    <pair entailment="ATTACKS" id="23" topic="conv-c5-Deguindos">
196
+      <t id="590490781351288832"/>
197
+      <h id="590489610930761728"/>
198
+    </pair>
199
+    <pair entailment="SUPPORTS" id="24" topic="conv-c5-Deguindos">
200
+      <t id="590505662846791680"/>
201
+      <h id="590487344848961536"/>
202
+    </pair>
203
+    <pair entailment="ATTACKS" id="25" topic="conv-c5-Deguindos">
204
+      <t id="590498484077338624"/>
205
+      <h id="590497633510289408"/>
206
+    </pair>
207
+    <pair entailment="SUPPORTS" id="26" topic="conv-c5-Deguindos">
208
+      <t id="590516047951835136"/>
209
+      <h id="590512258591105024"/>
210
+    </pair>
211
+    <pair entailment="SUPPORTS" id="27" topic="conv-c5-Deguindos">
212
+      <t id="590486764147576832"/>
213
+      <h id="590485945889251328"/>
214
+    </pair>
215
+    <pair entailment="ATTACKS" id="28" topic="conv-c5-Deguindos">
216
+      <t id="590572057961857024"/>
217
+      <h id="590556202477707265"/>
218
+    </pair>
219
+    <pair entailment="SUPPORTS" id="29" topic="conv-c5-Deguindos">
220
+      <t id="590540394649276417"/>
221
+      <h id="590538494688231425"/>
222
+    </pair>
223
+    <pair entailment="ATTACKS" id="30" topic="conv-c5-Deguindos">
224
+      <t id="590491619222233088"/>
225
+      <h id="590487744603893761"/>
226
+    </pair>
227
+    <pair entailment="ATTACKS" id="31" topic="conv-c5-Deguindos">
228
+      <t id="590490593370988545"/>
229
+      <h id="590490256740392960"/>
230
+    </pair>
231
+    <pair entailment="SUPPORTS" id="32" topic="conv-c5-Deguindos">
232
+      <t id="590502313799868416"/>
233
+      <h id="590491619222233088"/>
234
+    </pair>
235
+    <pair entailment="ATTACKS" id="33" topic="conv-c5-Deguindos">
236
+      <t id="590546999075856384"/>
237
+      <h id="590544955967463427"/>
238
+    </pair>
239
+    <pair entailment="ATTACKS" id="34" topic="conv-c5-Deguindos">
240
+      <t id="590492470498177024"/>
241
+      <h id="590492057027903489"/>
242
+    </pair>
243
+    <pair entailment="ATTACKS" id="35" topic="conv-c5-Deguindos">
244
+      <t id="590491426976354304"/>
245
+      <h id="590491046469095424"/>
246
+    </pair>
247
+    <pair entailment="ATTACKS" id="36" topic="conv-c5-Deguindos">
248
+      <t id="590490256740392960"/>
249
+      <h id="590489610930761728"/>
250
+    </pair>
251
+    <pair entailment="ATTACKS" id="37" topic="conv-c5-Deguindos">
252
+      <t id="590543705792208896"/>
253
+      <h id="590540582637989889"/>
254
+    </pair>
255
+    <pair entailment="ATTACKS" id="38" topic="conv-c5-Deguindos">
256
+      <t id="590547200545058816"/>
257
+      <h id="590544955967463427"/>
258
+    </pair>
259
+    <pair entailment="ATTACKS" id="39" topic="conv-c5-Deguindos">
260
+      <t id="590489251113992192"/>
261
+      <h id="590488286591848448"/>
262
+    </pair>
263
+    <pair entailment="ATTACKS" id="40" topic="conv-c5-Deguindos">
264
+      <t id="590485435106906112"/>
265
+      <h id="590484974027075584"/>
266
+    </pair>
267
+    <pair entailment="ATTACKS" id="41" topic="conv-c5-Deguindos">
268
+      <t id="590484586448211968"/>
269
+      <h id="590484317673037824"/>
270
+    </pair>
271
+    <pair entailment="NONE" id="42" topic="conv-c5-Deguindos">
272
+      <t id="590502238566670337"/>
273
+      <h id="590491619222233088"/>
274
+    </pair>
275
+    <pair entailment="ATTACKS" id="43" topic="conv-c5-Deguindos">
276
+      <t id="590502934393315328"/>
277
+      <h id="590502788964216832"/>
278
+    </pair>
279
+    <pair entailment="SUPPORTS" id="44" topic="conv-c5-Deguindos">
280
+      <t id="590540582637989889"/>
281
+      <h id="590538494688231425"/>
282
+    </pair>
283
+    <pair entailment="ATTACKS" id="45" topic="conv-c5-Deguindos">
284
+      <t id="590491281186562048"/>
285
+      <h id="590491046469095424"/>
286
+    </pair>
287
+    <pair entailment="ATTACKS" id="46" topic="conv-c5-Deguindos">
288
+      <t id="590547374365376512"/>
289
+      <h id="590544955967463427"/>
290
+    </pair>
291
+    <pair entailment="ATTACKS" id="47" topic="conv-c5-Deguindos">
292
+      <t id="590489338514907136"/>
293
+      <h id="590488286591848448"/>
294
+    </pair>
295
+    <pair entailment="ATTACKS" id="48" topic="conv-c5-Deguindos">
296
+      <t id="590544652060733441"/>
297
+      <h id="590543815460704258"/>
298
+    </pair>
299
+    <pair entailment="ATTACKS" id="49" topic="conv-c5-Deguindos">
300
+      <t id="590502788964216832"/>
301
+      <h id="590491619222233088"/>
302
+    </pair>
303
+    <pair entailment="ATTACKS" id="50" topic="conv-c5-Deguindos">
304
+      <t id="590490886334771200"/>
305
+      <h id="590489610930761728"/>
306
+    </pair>
307
+    <pair entailment="ATTACKS" id="51" topic="conv-c5-Deguindos">
308
+      <t id="590491690491850752"/>
309
+      <h id="590491426976354304"/>
310
+    </pair>
311
+    <pair entailment="ATTACKS" id="52" topic="conv-c5-Deguindos">
312
+      <t id="590493729057812480"/>
313
+      <h id="590493498979328000"/>
314
+    </pair>
315
+    <pair entailment="ATTACKS" id="53" topic="conv-c5-Deguindos">
316
+      <t id="590493498979328000"/>
317
+      <h id="590493330997452800"/>
318
+    </pair>
319
+    <pair entailment="ATTACKS" id="54" topic="conv-c5-Deguindos">
320
+      <t id="590492672751771649"/>
321
+      <h id="590492347827429376"/>
322
+    </pair>
323
+    <pair entailment="ATTACKS" id="55" topic="conv-c5-Deguindos">
324
+      <t id="590493831356882945"/>
325
+      <h id="590493617128665089"/>
326
+    </pair>
327
+    <pair entailment="ATTACKS" id="56" topic="conv-c5-Deguindos">
328
+      <t id="590496429631746048"/>
329
+      <h id="590493831356882945"/>
330
+    </pair>
331
+    <pair entailment="ATTACKS" id="57" topic="conv-c5-Deguindos">
332
+      <t id="590484317673037824"/>
333
+      <h id="590480949516840960"/>
334
+    </pair>
335
+    <pair entailment="ATTACKS" id="58" topic="conv-c5-Deguindos">
336
+      <t id="590489610930761728"/>
337
+      <h id="590489338514907136"/>
338
+    </pair>
339
+    <pair entailment="ATTACKS" id="59" topic="conv-c5-Deguindos">
340
+      <t id="590493032157421569"/>
341
+      <h id="590492798199160832"/>
342
+    </pair>
343
+    <pair entailment="ATTACKS" id="60" topic="conv-c5-Deguindos">
344
+      <t id="590491046469095424"/>
345
+      <h id="590490652137357312"/>
346
+    </pair>
347
+    <pair entailment="ATTACKS" id="61" topic="conv-c5-Deguindos">
348
+      <t id="590493330997452800"/>
349
+      <h id="590492915761356801"/>
350
+    </pair>
351
+    <pair entailment="ATTACKS" id="62" topic="conv-c5-Deguindos">
352
+      <t id="590571044727054338"/>
353
+      <h id="590556202477707265"/>
354
+    </pair>
355
+    <pair entailment="ATTACKS" id="63" topic="conv-c5-Deguindos">
356
+      <t id="590491950526156800"/>
357
+      <h id="590491561814839296"/>
358
+    </pair>
359
+    <pair entailment="ATTACKS" id="64" topic="conv-c5-Deguindos">
360
+      <t id="590494029277716480"/>
361
+      <h id="590493729057812480"/>
362
+    </pair>
363
+    <pair entailment="ATTACKS" id="65" topic="conv-c5-Deguindos">
364
+      <t id="590493253461475328"/>
365
+      <h id="590493032157421569"/>
366
+    </pair>
367
+    <pair entailment="ATTACKS" id="66" topic="conv-c5-Deguindos">
368
+      <t id="590501865936318464"/>
369
+      <h id="590491619222233088"/>
370
+    </pair>
371
+    <pair entailment="ATTACKS" id="67" topic="conv-c5-Deguindos">
372
+      <t id="590491854099128320"/>
373
+      <h id="590491512514969600"/>
374
+    </pair>
375
+    <pair entailment="ATTACKS" id="68" topic="conv-c5-Deguindos">
376
+      <t id="590485945889251328"/>
377
+      <h id="590485435106906112"/>
378
+    </pair>
379
+    <pair entailment="ATTACKS" id="69" topic="conv-c5-Deguindos">
380
+      <t id="590491264816144384"/>
381
+      <h id="590490781351288832"/>
382
+    </pair>
383
+    <pair entailment="SUPPORTS" id="70" topic="conv-c5-Deguindos">
384
+      <t id="590518373307572225"/>
385
+      <h id="590507061496123392"/>
386
+    </pair>
387
+    <pair entailment="SUPPORTS" id="71" topic="conv-c5-Deguindos">
388
+      <t id="590508470773936128"/>
389
+      <h id="590507061496123392"/>
390
+    </pair>
391
+    <pair entailment="ATTACKS" id="72" topic="conv-c5-Deguindos">
392
+      <t id="590492915761356801"/>
393
+      <h id="590492660193976320"/>
394
+    </pair>
395
+    <pair entailment="ATTACKS" id="73" topic="conv-c5-Deguindos">
396
+      <t id="590480949516840960"/>
397
+      <h id="590480675385507840"/>
398
+    </pair>
399
+    <pair entailment="SUPPORTS" id="74" topic="conv-c5-Deguindos">
400
+      <t id="590489516974153728"/>
401
+      <h id="590489251113992192"/>
402
+    </pair>
403
+    <pair entailment="ATTACKS" id="75" topic="conv-c5-Deguindos">
404
+      <t id="590498573499944960"/>
405
+      <h id="590497633510289408"/>
406
+    </pair>
407
+    <pair entailment="ATTACKS" id="76" topic="conv-c5-Deguindos">
408
+      <t id="590487744603893761"/>
409
+      <h id="590487439002710016"/>
410
+    </pair>
411
+    <pair entailment="ATTACKS" id="77" topic="conv-c5-Deguindos">
412
+      <t id="590571540925784064"/>
413
+      <h id="590556202477707265"/>
414
+    </pair>
415
+    <pair entailment="ATTACKS" id="78" topic="conv-c5-Deguindos">
416
+      <t id="590556202477707265"/>
417
+      <h id="590547374365376512"/>
418
+    </pair>
419
+    <pair entailment="ATTACKS" id="79" topic="conv-c5-Deguindos">
420
+      <t id="590484707697172480"/>
421
+      <h id="590484586448211968"/>
422
+    </pair>
423
+    <pair entailment="ATTACKS" id="80" topic="conv-c5-Deguindos">
424
+      <t id="590571312965427200"/>
425
+      <h id="590556202477707265"/>
426
+    </pair>
427
+    <pair entailment="ATTACKS" id="81" topic="conv-c5-Deguindos">
428
+      <t id="590491512514969600"/>
429
+      <h id="590491281186562048"/>
430
+    </pair>
431
+    <pair entailment="ATTACKS" id="82" topic="conv-c5-Deguindos">
432
+      <t id="590492798199160832"/>
433
+      <h id="590492470498177024"/>
434
+    </pair>
435
+    <pair entailment="SUPPORTS" id="83" topic="conv-c5-Deguindos">
436
+      <t id="590498344830644225"/>
437
+      <h id="590497633510289408"/>
438
+    </pair>
439
+    <pair entailment="ATTACKS" id="84" topic="conv-c5-Deguindos">
440
+      <t id="590572306466009090"/>
441
+      <h id="590556202477707265"/>
442
+    </pair>
443
+    <pair entailment="ATTACKS" id="85" topic="conv-c5-Deguindos">
444
+      <t id="590490652137357312"/>
445
+      <h id="590489610930761728"/>
446
+    </pair>
447
+    <pair entailment="SUPPORTS" id="86" topic="conv-c5-Deguindos">
448
+      <t id="590544581214744576"/>
449
+      <h id="590543815460704258"/>
450
+    </pair>
451
+    <pair entailment="ATTACKS" id="87" topic="conv-c5-Deguindos">
452
+      <t id="590492230248435712"/>
453
+      <h id="590491854099128320"/>
454
+    </pair>
455
+    <pair entailment="ATTACKS" id="88" topic="conv-c5-Deguindos">
456
+      <t id="590517879948369920"/>
457
+      <h id="590491950526156800"/>
458
+    </pair>
459
+    <pair entailment="ATTACKS" id="89" topic="conv-c5-Deguindos">
460
+      <t id="590484974027075584"/>
461
+      <h id="590484707697172480"/>
462
+    </pair>
463
+    <pair entailment="ATTACKS" id="90" topic="conv-c5-Deguindos">
464
+      <t id="590492219255169024"/>
465
+      <h id="590491281186562048"/>
466
+    </pair>
467
+    <pair entailment="SUPPORTS" id="91" topic="conv-c5-Deguindos">
468
+      <t id="590493617128665089"/>
469
+      <h id="590492915761356801"/>
470
+    </pair>
471
+    <pair entailment="ATTACKS" id="92" topic="conv-c5-Deguindos">
472
+      <t id="590544220643061760"/>
473
+      <h id="590543815460704258"/>
474
+    </pair>
475
+    <pair entailment="ATTACKS" id="93" topic="conv-c5-Deguindos">
476
+      <t id="590491664621432833"/>
477
+      <h id="590491264816144384"/>
478
+    </pair>
479
+  </argument-pairs>
480
+</entailment-corpus>

+ 4
- 0
smallexamples/Deguindos_c5.xml
Diff nebyl zobrazen, protože je příliš veliký
Zobrazit soubor


+ 77
- 0
smallexamples/PardoYRato-logweights.xml
Diff nebyl zobrazen, protože je příliš veliký
Zobrazit soubor


+ 13
- 0
smallexamples/Trillo_c8.xml
Diff nebyl zobrazen, protože je příliš veliký
Zobrazit soubor


Powered by TurnKey Linux.