==1096156== Memcheck, a memory error detector ==1096156== Copyright (C) 2002-2024, and GNU GPL'd, by Julian Seward et al. ==1096156== Using Valgrind-3.24.0 and LibVEX; rerun with -h for copyright info ==1096156== Command: /data/blackswan/ripley/R/R-devel-vg/bin/exec/R --vanilla --encoding=UTF-8 ==1096156== R Under development (unstable) (2026-01-19 r89308) -- "Unsuffered Consequences" Copyright (C) 2026 The R Foundation for Statistical Computing Platform: x86_64-pc-linux-gnu R is free software and comes with ABSOLUTELY NO WARRANTY. You are welcome to redistribute it under certain conditions. Type 'license()' or 'licence()' for distribution details. Natural language support but running in an English locale R is a collaborative project with many contributors. Type 'contributors()' for more information and 'citation()' on how to cite R or R packages in publications. Type 'demo()' for some demos, 'help()' for on-line help, or 'help.start()' for an HTML browser interface to help. Type 'q()' to quit R. > pkgname <- "dream" > source(file.path(R.home("share"), "R", "examples-header.R")) > options(warn = 1) > library('dream') > > base::assign(".oldSearch", base::search(), pos = 'CheckExEnv') > base::assign(".old_wd", base::getwd(), pos = 'CheckExEnv') > cleanEx() > nameEx("computeBCConstraint") > ### * computeBCConstraint > > flush(stderr()); flush(stdout()) > > ### Name: computeBCConstraint > ### Title: Compute Burchard and Cornwell's (2018) Two-Mode Constraint > ### Aliases: computeBCConstraint > > ### ** Examples > > > # For this example, we recreate Figure 2 in Burchard and Cornwell (2018: 13) > BCNet <- matrix( + c(1,1,0,0, + 1,0,1,0, + 1,0,0,1, + 0,1,1,1), + nrow = 4, ncol = 4, byrow = TRUE) > colnames(BCNet) <- c("1", "2", "3", "4") > rownames(BCNet) <- c("i", "j", "k", "m") > #library(sna) #To plot the two mode network, we use the sna R package > #gplot(BCNet, usearrows = FALSE, > # gmode = "twomode", displaylabels = TRUE) > computeBCConstraint(BCNet) Warning: `computeBCConstraint()` was deprecated in dream 1.0.0. ℹ Please use `netstats_tm_constraint()` instead. i j k m 0.7500000 0.7500000 0.7500000 0.3333333 > > #For this example, we recreate Figure 9 in Burchard and Cornwell (2018:18) for > #weighted two mode networks. > BCweighted <- matrix(c(1,2,1, 1,0,0, + 0,2,1,0,0,1), + nrow = 4, ncol = 3, + byrow = TRUE) > rownames(BCweighted) <- c("i", "j", "k", "l") > computeBCConstraint(BCweighted, weighted = TRUE) i j k l 0.8888889 1.0000000 1.7500000 2.0000000 > > > > > > > > cleanEx() > nameEx("computeBCES") > ### * computeBCES > > flush(stderr()); flush(stdout()) > > ### Name: computeBCES > ### Title: Compute Burchard and Cornwell's (2018) Two-Mode Effective Size > ### Aliases: computeBCES > > ### ** Examples > > > # For this example, we recreate Figure 2 in Burchard and Cornwell (2018: 13) > BCNet <- matrix( + c(1,1,0,0, + 1,0,1,0, + 1,0,0,1, + 0,1,1,1), + nrow = 4, ncol = 4, byrow = TRUE) > colnames(BCNet) <- c("1", "2", "3", "4") > rownames(BCNet) <- c("i", "j", "k", "m") > #library(sna) #To plot the two mode network, we use the sna R package > #gplot(BCNet, usearrows = FALSE, > # gmode = "twomode", displaylabels = TRUE) > computeBCES(BCNet) Warning: `computeBCES()` was deprecated in dream 1.0.0. ℹ Please use `netstats_tm_effective()` instead. i j k m 2.333333 2.333333 2.333333 3.000000 > > #In this example, we recreate Figure 9 in Burchard and Cornwell (2018:18) > #for weighted two mode networks. 
> BCweighted <- matrix(c(1,2,1, 1,0,0, + 0,2,1,0,0,1), + nrow = 4, ncol = 3, + byrow = TRUE) > rownames(BCweighted) <- c("i", "j", "k", "l") > computeBCES(BCweighted, weighted = TRUE) i j k l 2.444444 1.000000 1.166667 1.000000 > > > > > cleanEx() > nameEx("computeBCRedund") > ### * computeBCRedund > > flush(stderr()); flush(stdout()) > > ### Name: computeBCRedund > ### Title: Compute Burchard and Cornwell's (2018) Two-Mode Redundancy > ### Aliases: computeBCRedund > > ### ** Examples > > > # For this example, we recreate Figure 2 in Burchard and Cornwell (2018: 13) > BCNet <- matrix( + c(1,1,0,0, + 1,0,1,0, + 1,0,0,1, + 0,1,1,1), + nrow = 4, ncol = 4, byrow = TRUE) > colnames(BCNet) <- c("1", "2", "3", "4") > rownames(BCNet) <- c("i", "j", "k", "m") > #library(sna) #To plot the two mode network, we use the sna R package > #gplot(BCNet, usearrows = FALSE, > # gmode = "twomode", displaylabels = TRUE) > #this values replicate those reported by Burchard and Cornwell (2018: 14) > computeBCRedund(BCNet) Warning: `computeBCRedund()` was deprecated in dream 1.0.0. ℹ Please use `netstats_tm_redundancy()` instead. i j k m i 0.0000000 0.3333333 0.3333333 0 j 0.3333333 0.0000000 0.3333333 0 k 0.3333333 0.3333333 0.0000000 0 m 0.0000000 0.0000000 0.0000000 0 > > > #For this example, we recreate Figure 9 in Burchard and Cornwell (2018:18) > #for weighted two mode networks. > BCweighted <- matrix(c(1,2,1, 1,0,0, + 0,2,1,0,0,1), + nrow = 4, ncol = 3, + byrow = TRUE) > rownames(BCweighted) <- c("i", "j", "k", "l") > computeBCRedund(BCweighted, weighted = TRUE) i j k l i 0.0000000 0 0.2222222 0.3333333 j 0.0000000 0 NA NA k 0.3333333 NA 0.0000000 0.5000000 l 0.5000000 NA 0.5000000 0.0000000 > > > > > > cleanEx() > nameEx("computeBurtsConstraint") > ### * computeBurtsConstraint > > flush(stderr()); flush(stdout()) > > ### Name: computeBurtsConstraint > ### Title: Compute Burt's (1992) Constraint for Ego Networks from a > ### Sociomatrix > ### Aliases: computeBurtsConstraint > > ### ** Examples > > > # For this example, we recreate the ego network provided in Burt (1992: 56): > BurtEgoNet <- matrix(c( + 0,1,0,0,1,1,1, + 1,0,0,1,0,0,1, + 0,0,0,0,0,0,1, + 0,1,0,0,0,0,1, + 1,0,0,0,0,0,1, + 1,0,0,0,0,0,1, + 1,1,1,1,1,1,0), + nrow = 7, ncol = 7) > colnames(BurtEgoNet) <- rownames(BurtEgoNet) <- c("A", "B", "C", "D", "E", + "F", "ego") > #the constraint value for the ego replicates that provided in Burt (1992: 56) > computeBurtsConstraint(BurtEgoNet) Warning: `computeBurtsConstraint()` was deprecated in dream 1.0.0. ℹ Please use `netstats_om_constraint()` instead. A B C D E F ego 0.6836 0.8395 1.0000 1.1250 1.1250 1.1250 0.4003 > > > > > > cleanEx() > nameEx("computeBurtsES") > ### * computeBurtsES > > flush(stderr()); flush(stdout()) > > ### Name: computeBurtsES > ### Title: Compute Burt's (1992) Effective Size for Ego Networks from a > ### Sociomatrix > ### Aliases: computeBurtsES > > ### ** Examples > > # For this example, we recreate the ego network provided in Borgatti (1997): > BorgattiEgoNet <- matrix( + c(0,1,0,0,0,0,0,0,1, + 1,0,0,0,0,0,0,0,1, + 0,0,0,1,0,0,0,0,1, + 0,0,1,0,0,0,0,0,1, + 0,0,0,0,0,1,0,0,1, + 0,0,0,0,1,0,0,0,1, + 0,0,0,0,0,0,0,1,1, + 0,0,0,0,0,0,1,0,1, + 1,1,1,1,1,1,1,1,0), + nrow = 9, ncol = 9, byrow = TRUE) > colnames(BorgattiEgoNet) <- rownames(BorgattiEgoNet) <- c("A", "B", "C", + "D", "E", "F", + "G", "H", "ego") > #the effective size value for the ego replicates that provided in Borgatti (1997) > computeBurtsES(BorgattiEgoNet) Warning: `computeBurtsES()` was deprecated in dream 1.0.0. 
ℹ Please use `netstats_om_effective()` instead. A B C D E F G H ego 1 1 1 1 1 1 1 1 7 > > # For this example, we recreate the ego network provided in Burt (1992: 56): > BurtEgoNet <- matrix(c( + 0,1,0,0,1,1,1, + 1,0,0,1,0,0,1, + 0,0,0,0,0,0,1, + 0,1,0,0,0,0,1, + 1,0,0,0,0,0,1, + 1,0,0,0,0,0,1, + 1,1,1,1,1,1,0), + nrow = 7, ncol = 7) > colnames(BurtEgoNet) <- rownames(BurtEgoNet) <- c("A", "B", "C", "D", "E", + "F", "ego") > #the effective size value for the ego replicates that provided in Burt (1992: 56) > computeBurtsES(BurtEgoNet) A B C D E F ego 2.500000 1.666667 1.000000 1.000000 1.000000 1.000000 4.666667 > > > > cleanEx() > nameEx("computeFourCycles") > ### * computeFourCycles > > flush(stderr()); flush(stdout()) > > ### Name: computeFourCycles > ### Title: Compute the Four-Cycles Network Statistic for Event Dyads in a > ### Relational Event Sequence > ### Aliases: computeFourCycles > > ### ** Examples > > data("WikiEvent2018.first100k") > WikiEvent2018 <- WikiEvent2018.first100k[1:1000,] #the first one thousand events > WikiEvent2018$time <- as.numeric(WikiEvent2018$time) #making the variable numeric > ### Creating the EventSet By Employing Case-Control Sampling With M = 5 and > ### Sampling from the Observed Event Sequence with P = 0.01 > EventSet <- processTMEventSeq( + data = WikiEvent2018, # The Event Dataset + time = WikiEvent2018$time, # The Time Variable + eventID = WikiEvent2018$eventID, # The Event Sequence Variable + sender = WikiEvent2018$user, # The Sender Variable + receiver = WikiEvent2018$article, # The Receiver Variable + p_samplingobserved = 0.01, # The Probability of Selection + n_controls = 8, # The Number of Controls to Sample from the Full Risk Set + seed = 9999) # The Seed for Replication Warning: `processTMEventSeq()` was deprecated in dream 1.0.0. ℹ Please use `create_riskset()` instead. > > #### Estimating the Four-Cycle Statistic Without the Sliding Windows Framework > EventSet$fourcycle <- computeFourCycles( + observed_time = WikiEvent2018$time, + observed_sender = WikiEvent2018$user, + observed_receiver = WikiEvent2018$article, + processed_time = EventSet$time, + processed_sender = EventSet$sender, + processed_receiver = EventSet$receiver, + halflife = 2.592e+09, #halflife parameter + dyadic_weight = 0, + Lerneretal_2013 = FALSE) Warning: `computeFourCycles()` was deprecated in dream 1.0.0. ℹ Please use `remstats_fourcycles()` instead. > > #### Estimating the Four-Cycle Statistic With the Sliding Windows Framework > EventSet$cycle4SW <- computeFourCycles( + observed_time = WikiEvent2018$time, + observed_sender = WikiEvent2018$user, + observed_receiver = WikiEvent2018$article, + processed_time = EventSet$time, + processed_sender = EventSet$sender, + processed_receiver = EventSet$receiver, + processed_seqIDs = EventSet$sequenceID, + halflife = 2.592e+09, #halflife parameter + dyadic_weight = 0, + sliding_window = TRUE, + Lerneretal_2013 = FALSE) Setting up data structure for the sliding windows framework....... > > #The results with and without the sliding windows are the same (see correlation > #below). Using the sliding windows method is recommended when the data are > #big' so that memory allotment is more efficient. 
> cor(EventSet$fourcycle, EventSet$cycle4SW) [1] 1 > > #### Estimating the Four-Cycle Statistic with the Counts of Events Returned > EventSet$cycle4C <- computeFourCycles( + observed_time = WikiEvent2018$time, + observed_sender = WikiEvent2018$user, + observed_receiver = WikiEvent2018$article, + processed_time = EventSet$time, + processed_sender = EventSet$sender, + processed_receiver = EventSet$receiver, + processed_seqIDs = EventSet$sequenceID, + halflife = 2.592e+09, #halflife parameter + dyadic_weight = 0, + sliding_window = FALSE, + counts = TRUE, + Lerneretal_2013 = FALSE) > > cbind(EventSet$fourcycle, + EventSet$cycle4SW, + EventSet$cycle4C) [,1] [,2] [,3] [1,] 0.0000000 0.0000000 0 [2,] 0.0000000 0.0000000 0 [3,] 0.0000000 0.0000000 0 [4,] 0.0000000 0.0000000 0 [5,] 0.0000000 0.0000000 0 [6,] 0.0000000 0.0000000 0 [7,] 0.0000000 0.0000000 0 [8,] 0.0000000 0.0000000 0 [9,] 0.0000000 0.0000000 0 [10,] 0.0000000 0.0000000 0 [11,] 0.0000000 0.0000000 0 [12,] 0.0000000 0.0000000 0 [13,] 0.0000000 0.0000000 0 [14,] 0.0000000 0.0000000 0 [15,] 0.0000000 0.0000000 0 [16,] 0.0000000 0.0000000 0 [17,] 0.0000000 0.0000000 0 [18,] 0.0000000 0.0000000 0 [19,] 0.0000000 0.0000000 0 [20,] 0.0000000 0.0000000 0 [21,] 0.9085797 0.9085797 1 [22,] 0.9613469 0.9613469 1 [23,] 0.0000000 0.0000000 0 [24,] 0.0000000 0.0000000 0 [25,] 0.0000000 0.0000000 0 [26,] 0.0000000 0.0000000 0 [27,] 0.0000000 0.0000000 0 [28,] 0.0000000 0.0000000 0 [29,] 0.0000000 0.0000000 0 [30,] 0.0000000 0.0000000 0 [31,] 0.0000000 0.0000000 0 [32,] 0.0000000 0.0000000 0 [33,] 0.0000000 0.0000000 0 [34,] 0.0000000 0.0000000 0 [35,] 0.0000000 0.0000000 0 [36,] 0.0000000 0.0000000 0 [37,] 0.0000000 0.0000000 0 [38,] 0.0000000 0.0000000 0 [39,] 0.0000000 0.0000000 0 [40,] 0.0000000 0.0000000 0 [41,] 0.0000000 0.0000000 0 [42,] 0.0000000 0.0000000 0 [43,] 0.0000000 0.0000000 0 [44,] 0.0000000 0.0000000 0 [45,] 0.0000000 0.0000000 0 [46,] 0.0000000 0.0000000 0 [47,] 0.0000000 0.0000000 0 [48,] 0.8052507 0.8052507 1 [49,] 0.5952885 0.5952885 1 [50,] 0.0000000 0.0000000 0 [51,] 0.0000000 0.0000000 0 [52,] 0.0000000 0.0000000 0 [53,] 0.0000000 0.0000000 0 [54,] 0.0000000 0.0000000 0 [55,] 0.0000000 0.0000000 0 [56,] 0.0000000 0.0000000 0 [57,] 0.0000000 0.0000000 0 [58,] 0.0000000 0.0000000 0 [59,] 0.0000000 0.0000000 0 [60,] 0.8721595 0.8721595 1 [61,] 0.0000000 0.0000000 0 [62,] 0.0000000 0.0000000 0 [63,] 0.0000000 0.0000000 0 [64,] 0.0000000 0.0000000 0 [65,] 0.0000000 0.0000000 0 [66,] 0.6145830 0.6145830 1 [67,] 0.0000000 0.0000000 0 [68,] 0.0000000 0.0000000 0 [69,] 0.0000000 0.0000000 0 [70,] 0.0000000 0.0000000 0 [71,] 0.4821051 0.4821051 1 [72,] 0.0000000 0.0000000 0 [73,] 0.0000000 0.0000000 0 [74,] 0.0000000 0.0000000 0 [75,] 0.0000000 0.0000000 0 [76,] 0.0000000 0.0000000 0 [77,] 0.0000000 0.0000000 0 [78,] 0.0000000 0.0000000 0 [79,] 0.0000000 0.0000000 0 [80,] 0.0000000 0.0000000 0 [81,] 0.6457861 0.6457861 1 [82,] 0.0000000 0.0000000 0 [83,] 0.0000000 0.0000000 0 [84,] 0.0000000 0.0000000 0 [85,] 0.0000000 0.0000000 0 [86,] 0.0000000 0.0000000 0 [87,] 0.0000000 0.0000000 0 [88,] 0.0000000 0.0000000 0 [89,] 0.0000000 0.0000000 0 [90,] 0.0000000 0.0000000 0 > > > > > cleanEx() > nameEx("computeHomFourCycles") > ### * computeHomFourCycles > > flush(stderr()); flush(stdout()) > > ### Name: computeHomFourCycles > ### Title: Compute Fujimoto, Snijders, and Valente's (2018) Homophilous > ### Four-Cycles for Two-Mode Networks > ### Aliases: computeHomFourCycles > > ### ** Examples > > > # For this example, we use the Davis 
Southern Women's Dataset. > data("southern.women") > #creating a random binary membership vector > set.seed(9999) > membership <- sample(0:1, nrow(southern.women), replace = TRUE) > #the homophilous four-cycle values > computeHomFourCycles(southern.women, mem = membership) Warning: `computeHomFourCycles()` was deprecated in dream 1.0.0. ℹ Please use `netstats_tm_homfourcycles()` instead. EVELYN LAURA THERESA BRENDA CHARLOTTE FRANCES ELEANOR PEARL 34 33 26 23 12 17 18 11 RUTH VERNE MYRA KATHERINE SYLVIA NORA HELEN DOROTHY 17 10 18 14 34 26 15 18 OLIVIA FLORA 1 1 > > > > cleanEx() > nameEx("computeISP") > ### * computeISP > > flush(stderr()); flush(stdout()) > > ### Name: computeISP > ### Title: Compute Butts' (2008) Incoming Shared Partners Network Statistic > ### for Event Dyads in a Relational Event Sequence > ### Aliases: computeISP > > ### ** Examples > > events <- data.frame(time = 1:18, + eventID = 1:18, + sender = c("A", "B", "C", + "A", "D", "E", + "F", "B", "A", + "F", "D", "B", + "G", "B", "D", + "H", "A", "D"), + target = c("B", "C", "D", + "E", "A", "F", + "D", "A", "C", + "G", "B", "C", + "H", "J", "A", + "F", "C", "B")) > > eventSet <- processOMEventSeq(data = events, + time = events$time, + eventID = events$eventID, + sender = events$sender, + receiver = events$target, + p_samplingobserved = 1.00, + n_controls = 1, + seed = 9999) Warning: `processOMEventSeq()` was deprecated in dream 1.0.0. ℹ Please use `create_riskset()` instead. > > # Computing Incoming Shared Partners Statistic without the sliding windows framework > eventSet$ISP <- computeISP( + observed_time = events$time, + observed_sender = events$sender, + observed_receiver = events$target, + processed_time = eventSet$time, + processed_sender = eventSet$sender, + processed_receiver = eventSet$receiver, + halflife = 2, #halflife parameter + dyadic_weight = 0, + Lerneretal_2013 = FALSE) Warning: `computeISP()` was deprecated in dream 1.0.0. ℹ Please use `remstats_triads()` instead. > > # Computing Incoming Shared Partners Statistic with the sliding windows framework > eventSet$ISP_SW <- computeISP( + observed_time = events$time, + observed_sender = events$sender, + observed_receiver = events$target, + processed_time = eventSet$time, + processed_sender = eventSet$sender, + processed_receiver = eventSet$receiver, + halflife = 2, #halflife parameter + processed_seqIDs = eventSet$sequenceID, + dyadic_weight = 0, + sliding_window = TRUE, + Lerneretal_2013 = FALSE) Setting up data structure for the sliding windows framework....... > > #The results with and without the sliding windows are the same (see correlation > #below). Using the sliding windows method is recommended when the data are > #big' so that memory allotment is more efficient. 
> cor(eventSet$ISP , eventSet$ISP_SW) [1] 1 > > # Computing Incoming Shared Partners Statistics with the counts of events being returned > eventSet$ISPC <- computeISP( + observed_time = events$time, + observed_sender = events$sender, + observed_receiver = events$target, + processed_time = eventSet$time, + processed_sender = eventSet$sender, + processed_receiver = eventSet$receiver, + halflife = 2, #halflife parameter + dyadic_weight = 0, + sliding_window = FALSE, + counts = TRUE, + Lerneretal_2013 = FALSE) > > cbind(eventSet$ISP, + eventSet$ISP_SW, + eventSet$ISPC) [,1] [,2] [,3] [1,] 0.00000000 0.00000000 0 [2,] 0.00000000 0.00000000 0 [3,] 0.00000000 0.00000000 0 [4,] 0.00000000 0.00000000 0 [5,] 0.00000000 0.00000000 0 [6,] 0.00000000 0.00000000 0 [7,] 0.00000000 0.00000000 0 [8,] 0.00000000 0.00000000 0 [9,] 0.00000000 0.00000000 0 [10,] 0.00000000 0.00000000 0 [11,] 0.00000000 0.00000000 0 [12,] 0.00000000 0.00000000 0 [13,] 0.21022410 0.21022410 1 [14,] 0.00000000 0.00000000 0 [15,] 0.00000000 0.00000000 0 [16,] 0.25000000 0.25000000 1 [17,] 0.25000000 0.25000000 1 [18,] 0.00000000 0.00000000 0 [19,] 0.07432544 0.07432544 1 [20,] 0.00000000 0.00000000 0 [21,] 0.00000000 0.00000000 0 [22,] 0.08838835 0.08838835 1 [23,] 0.00000000 0.00000000 0 [24,] 0.00000000 0.00000000 0 [25,] 0.00000000 0.00000000 0 [26,] 0.00000000 0.00000000 0 [27,] 0.00000000 0.00000000 0 [28,] 0.00000000 0.00000000 0 [29,] 0.17951758 0.17951758 1 [30,] 0.00000000 0.00000000 0 [31,] 0.00000000 0.00000000 0 [32,] 0.08975879 0.08975879 1 [33,] 0.00000000 0.00000000 0 [34,] 0.00000000 0.00000000 0 [35,] 0.17951758 0.17951758 1 > > > > cleanEx() > nameEx("computeITP") > ### * computeITP > > flush(stderr()); flush(stdout()) > > ### Name: computeITP > ### Title: Compute Butts' (2008) Incoming Two Paths Network Statistic for > ### Event Dyads in a Relational Event Sequence > ### Aliases: computeITP > > ### ** Examples > > events <- data.frame(time = 1:18, + eventID = 1:18, + sender = c("A", "B", "C", + "A", "D", "E", + "F", "B", "A", + "F", "D", "B", + "G", "B", "D", + "H", "A", "D"), + target = c("B", "C", "D", + "E", "A", "F", + "D", "A", "C", + "G", "B", "C", + "H", "J", "A", + "F", "C", "B")) > > eventSet <- processOMEventSeq(data = events, + time = events$time, + eventID = events$eventID, + sender = events$sender, + receiver = events$target, + p_samplingobserved = 1.00, + n_controls = 1, + seed = 9999) > > # Computing Incoming Two Paths Statistics without the sliding windows framework > eventSet$ITP <- computeITP( + observed_time = events$time, + observed_sender = events$sender, + observed_receiver = events$target, + processed_time = eventSet$time, + processed_sender = eventSet$sender, + processed_receiver = eventSet$receiver, + halflife = 2, #halflife parameter + dyadic_weight = 0, + Lerneretal_2013 = FALSE) Warning: `computeITP()` was deprecated in dream 1.0.0. ℹ Please use `remstats_triads()` instead. > > # Computing Incoming Two Paths Statistics with the sliding windows framework > eventSet$ITP_SW <- computeITP( + observed_time = events$time, + observed_sender = events$sender, + observed_receiver = events$target, + processed_time = eventSet$time, + processed_sender = eventSet$sender, + processed_receiver = eventSet$receiver, + halflife = 2, #halflife parameter + processed_seqIDs = eventSet$sequenceID, + dyadic_weight = 0, + sliding_window = TRUE, + Lerneretal_2013 = FALSE) Setting up data structure for the sliding windows framework....... 
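# The `halflife` argument in computeISP()/computeITP() implies an exponential
# down-weighting of past events. A minimal sketch of that weighting, assuming
# the common form w(dt) = (1/2)^(dt / halflife); the package may scale this
# differently (e.g., when Lerneretal_2013 = TRUE), so treat this as an
# illustration only. computeRemDyadCut() further below is the package's helper
# for choosing a matching dyadic_weight cutoff.
decay_weight <- function(dt, halflife) (1/2)^(dt / halflife)

decay_weight(0, 2)   # an event happening "now" counts fully: 1
decay_weight(2, 2)   # one halflife in the past counts half: 0.5
decay_weight(4, 2)   # two halflives in the past: 0.25

# The WikiEvent2018 examples elsewhere in this run use halflife = 2.592e+09,
# i.e. 30 days expressed in milliseconds:
30 * 24 * 60 * 60 * 1000              # 2.592e+09

# A weight drops below 0.01 after log2(100), roughly 6.64, halflives; this is
# the "6.64 x T1/2" rule of thumb quoted from Lerner and Lomi (2020) further
# below.
log2(1 / 0.01)                        # ~ 6.64
decay_weight(6.64 * 2, halflife = 2)  # ~ 0.01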
> > #The results with and without the sliding windows are the same (see correlation > #below). Using the sliding windows method is recommended when the data are > #big' so that memory allotment is more efficient. > cor(eventSet$ITP, eventSet$ITP_SW) [1] 1 > > # Computing Incoming Shared Partners Statistics with the counts of events being returned > eventSet$ITPC <- computeITP( + observed_time = events$time, + observed_sender = events$sender, + observed_receiver = events$target, + processed_time = eventSet$time, + processed_sender = eventSet$sender, + processed_receiver = eventSet$receiver, + halflife = 2, #halflife parameter + dyadic_weight = 0, + sliding_window = FALSE, + counts = TRUE, + Lerneretal_2013 = FALSE) > > cbind(eventSet$ITP, + eventSet$ITP_SW, + eventSet$ITPC) [,1] [,2] [,3] [1,] 0.00000000 0.00000000 0 [2,] 0.00000000 0.00000000 0 [3,] 0.00000000 0.00000000 0 [4,] 0.00000000 0.00000000 0 [5,] 0.00000000 0.00000000 0 [6,] 0.00000000 0.00000000 0 [7,] 0.00000000 0.00000000 0 [8,] 0.00000000 0.00000000 0 [9,] 0.00000000 0.00000000 0 [10,] 0.00000000 0.00000000 0 [11,] 0.00000000 0.00000000 0 [12,] 0.00000000 0.00000000 0 [13,] 0.00000000 0.00000000 0 [14,] 0.00000000 0.00000000 0 [15,] 0.35355339 0.35355339 1 [16,] 0.17677670 0.17677670 1 [17,] 0.00000000 0.00000000 0 [18,] 0.00000000 0.00000000 0 [19,] 0.17677670 0.17677670 1 [20,] 0.05255603 0.05255603 1 [21,] 0.00000000 0.00000000 0 [22,] 0.17677670 0.17677670 1 [23,] 0.06250000 0.06250000 1 [24,] 0.00000000 0.00000000 0 [25,] 0.00000000 0.00000000 0 [26,] 0.00000000 0.00000000 0 [27,] 0.07547785 0.07547785 1 [28,] 0.04419417 0.04419417 1 [29,] 0.00000000 0.00000000 0 [30,] 0.21022410 0.21022410 1 [31,] 0.00000000 0.00000000 0 [32,] 0.06346905 0.06346905 1 [33,] 0.42044821 0.42044821 1 [34,] 0.02668545 0.02668545 1 [35,] 0.00000000 0.00000000 0 > > > > cleanEx() > nameEx("computeLealBrokerage") > ### * computeLealBrokerage > > flush(stderr()); flush(stdout()) > > ### Name: computeLealBrokerage > ### Title: Compute Potential for Intercultural Brokerage (PIB) Based on > ### Leal (2025) > ### Aliases: computeLealBrokerage > > ### ** Examples > > > # For this example, we recreate Figure 3 in Leal (2025) > LealNet <- matrix( c( + 0,1,0,0,0,0,0, + 1,0,1,1,0,0,0, + 0,1,0,0,1,1,0, + 0,1,0,0,1,0,0, + 0,0,1,1,0,0,0, + 0,0,1,0,0,0,1, + 0,0,0,0,0,1,0), + nrow = 7, ncol = 7, byrow = TRUE) > > colnames(LealNet) <- rownames(LealNet) <- c("A", "B", "C","D", + "E", "F", "G") > categorical_variable <- c(0,0,1,0,0,0,0) > #These values are exactly the same as reported by Leal (2025) > computeLealBrokerage(LealNet, + symmetric = TRUE, + g.mem = categorical_variable) Warning: `computeLealBrokerage()` was deprecated in dream 1.0.0. ℹ Please use `netstats_om_pib()` instead. A B C D E F G 0.0 1.5 0.0 0.0 0.5 1.0 0.0 > > > > > > > cleanEx() > nameEx("computeNPaths") > ### * computeNPaths > > flush(stderr()); flush(stdout()) > > ### Name: computeNPaths > ### Title: Compute the Number of Walks of Length K in a One-Mode Network > ### Aliases: computeNPaths > > ### ** Examples > > > # For this example, we generate a random one-mode graph with the sna package. > #creating the random network with 10 actors > set.seed(9999) > rnet <- matrix(sample(c(0,1), 10*10, replace = TRUE, prob = c(0.8,0.2)), + nrow = 10, ncol = 10, byrow = TRUE) > diag(rnet) <- 0 #setting self ties to 0 > #counting the paths of length 2 > computeNPaths(rnet, k = 2) Warning: `computeNPaths()` was deprecated in dream 1.0.0. ℹ Please use `netstats_om_nwalks()` instead. 
[,1] [,2] [,3] [,4] [,5] [,6] [,7] [,8] [,9] [,10] [1,] 0 2 0 0 1 0 0 2 0 1 [2,] 0 1 1 1 1 1 0 3 2 1 [3,] 1 0 0 0 0 1 1 0 1 0 [4,] 0 1 1 0 0 0 0 1 0 1 [5,] 1 1 0 0 0 1 1 0 1 0 [6,] 1 1 1 0 0 1 1 0 1 0 [7,] 0 0 0 0 1 0 0 2 1 1 [8,] 0 0 0 0 0 0 0 0 0 0 [9,] 0 0 0 0 0 0 0 0 0 0 [10,] 0 0 0 0 0 0 0 0 0 0 > #counting the paths of length 5 > computeNPaths(rnet, k = 5) [,1] [,2] [,3] [,4] [,5] [,6] [,7] [,8] [,9] [,10] [1,] 3 7 3 1 5 4 3 11 7 5 [2,] 4 8 4 3 5 7 4 13 11 5 [3,] 3 4 3 1 1 4 3 4 5 2 [4,] 1 4 2 1 3 2 1 7 4 3 [5,] 4 7 4 1 3 5 4 8 7 4 [6,] 4 8 5 2 4 6 4 11 9 5 [7,] 1 1 1 1 1 2 1 3 3 1 [8,] 0 0 0 0 0 0 0 0 0 0 [9,] 0 0 0 0 0 0 0 0 0 0 [10,] 0 0 0 0 0 0 0 0 0 0 > > > > cleanEx() > nameEx("computeOSP") > ### * computeOSP > > flush(stderr()); flush(stdout()) > > ### Name: computeOSP > ### Title: Compute Butts' (2008) Outgoing Shared Partners Network Statistic > ### for Event Dyads in a Relational Event Sequence > ### Aliases: computeOSP > > ### ** Examples > > events <- data.frame(time = 1:18, + eventID = 1:18, + sender = c("A", "B", "C", + "A", "D", "E", + "F", "B", "A", + "F", "D", "B", + "G", "B", "D", + "H", "A", "D"), + target = c("B", "C", "D", + "E", "A", "F", + "D", "A", "C", + "G", "B", "C", + "H", "J", "A", + "F", "C", "B")) > > eventSet <- processOMEventSeq(data = events, + time = events$time, + eventID = events$eventID, + sender = events$sender, + receiver = events$target, + p_samplingobserved = 1.00, + n_controls = 1, + seed = 9999) > > # Computing Outgoing Shared Partners Statistics without the sliding windows framework > eventSet$OSP <- computeOSP( + observed_time = events$time, + observed_sender = events$sender, + observed_receiver = events$target, + processed_time = eventSet$time, + processed_sender = eventSet$sender, + processed_receiver = eventSet$receiver, + halflife = 2, #halflife parameter + dyadic_weight = 0, + Lerneretal_2013 = FALSE) Warning: `computeOSP()` was deprecated in dream 1.0.0. ℹ Please use `remstats_triads()` instead. > > # Computing Outgoing Shared Partners Statistics with the sliding windows framework > eventSet$OSP_SW <- computeOSP( + observed_time = events$time, + observed_sender = events$sender, + observed_receiver = events$target, + processed_time = eventSet$time, + processed_sender = eventSet$sender, + processed_receiver = eventSet$receiver, + halflife = 2, #halflife parameter + processed_seqIDs = eventSet$sequenceID, + dyadic_weight = 0, + sliding_window = TRUE, + Lerneretal_2013 = FALSE) Setting up data structure for the sliding windows framework....... > > #The results with and without the sliding windows are the same (see correlation > #below). Using the sliding windows method is recommended when the data are > #big' so that memory allotment is more efficient. 
> cor(eventSet$OSP , eventSet$OSP_SW) [1] 1 > > # Computing Outgoing Shared Partners Statistics with the counts of events being returned > eventSet$OSP_C <- computeOSP( + observed_time = events$time, + observed_sender = events$sender, + observed_receiver = events$target, + processed_time = eventSet$time, + processed_sender = eventSet$sender, + processed_receiver = eventSet$receiver, + halflife = 2, #halflife parameter + dyadic_weight = 0, + sliding_window = FALSE, + counts = TRUE, + Lerneretal_2013 = FALSE) > > cbind(eventSet$OSP, + eventSet$OSP_SW, + eventSet$OSP_C) [,1] [,2] [,3] [1,] 0.00000000 0.00000000 0 [2,] 0.00000000 0.00000000 0 [3,] 0.00000000 0.00000000 0 [4,] 0.00000000 0.00000000 0 [5,] 0.00000000 0.00000000 0 [6,] 0.00000000 0.00000000 0 [7,] 0.00000000 0.00000000 0 [8,] 0.00000000 0.00000000 0 [9,] 0.00000000 0.00000000 0 [10,] 0.00000000 0.00000000 0 [11,] 0.00000000 0.00000000 0 [12,] 0.00000000 0.00000000 0 [13,] 0.00000000 0.00000000 0 [14,] 0.00000000 0.00000000 0 [15,] 0.00000000 0.00000000 0 [16,] 0.00000000 0.00000000 0 [17,] 0.00000000 0.00000000 0 [18,] 0.00000000 0.00000000 0 [19,] 0.17677670 0.17677670 1 [20,] 0.21022410 0.21022410 1 [21,] 0.00000000 0.00000000 0 [22,] 0.00000000 0.00000000 0 [23,] 0.00000000 0.00000000 0 [24,] 0.00000000 0.00000000 0 [25,] 0.00000000 0.00000000 0 [26,] 0.00000000 0.00000000 0 [27,] 0.00000000 0.00000000 0 [28,] 0.04419417 0.04419417 1 [29,] 0.00000000 0.00000000 0 [30,] 0.00000000 0.00000000 0 [31,] 0.00000000 0.00000000 0 [32,] 0.00000000 0.00000000 0 [33,] 0.02209709 0.02209709 1 [34,] 0.10674179 0.10674179 1 [35,] 0.00000000 0.00000000 0 > > > > cleanEx() > nameEx("computeOTP") > ### * computeOTP > > flush(stderr()); flush(stdout()) > > ### Name: computeOTP > ### Title: Compute Butts' (2008) Outgoing Two Paths Network Statistic for > ### Event Dyads in a Relational Event Sequence > ### Aliases: computeOTP > > ### ** Examples > > events <- data.frame(time = 1:18, + eventID = 1:18, + sender = c("A", "B", "C", + "A", "D", "E", + "F", "B", "A", + "F", "D", "B", + "G", "B", "D", + "H", "A", "D"), + target = c("B", "C", "D", + "E", "A", "F", + "D", "A", "C", + "G", "B", "C", + "H", "J", "A", + "F", "C", "B")) > > eventSet <- processOMEventSeq(data = events, + time = events$time, + eventID = events$eventID, + sender = events$sender, + receiver = events$target, + p_samplingobserved = 1.00, + n_controls = 1, + seed = 9999) > > # Computing Outgoing Two Paths Statistics without the sliding windows framework > eventSet$OTP <- computeOTP( + observed_time = events$time, + observed_sender = events$sender, + observed_receiver = events$target, + processed_time = eventSet$time, + processed_sender = eventSet$sender, + processed_receiver = eventSet$receiver, + halflife = 2, #halflife parameter + dyadic_weight = 0, + Lerneretal_2013 = FALSE) Warning: `computeOTP()` was deprecated in dream 1.0.0. ℹ Please use `remstats_triads()` instead. > > # Computing Outgoing Two Paths Statistics with the sliding windows framework > eventSet$OTP_SW <- computeOTP( + observed_time = events$time, + observed_sender = events$sender, + observed_receiver = events$target, + processed_time = eventSet$time, + processed_sender = eventSet$sender, + processed_receiver = eventSet$receiver, + halflife = 2, #halflife parameter + processed_seqIDs = eventSet$sequenceID, + dyadic_weight = 0, + sliding_window = TRUE, + Lerneretal_2013 = FALSE) Setting up data structure for the sliding windows framework....... 
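# A minimal sketch of the configuration behind the outgoing two-paths
# statistic: for a dyad (i, j), third parties h with a past event i -> h and a
# past event h -> j. This raw count ignores the exponential down-weighting and
# any sampling adjustments computeOTP() applies, so it illustrates the
# configuration only, not the package's computation.
count_otp <- function(past_senders, past_receivers, i, j) {
  sent_to_by_i <- unique(past_receivers[past_senders == i])  # h with i -> h
  senders_to_j <- unique(past_senders[past_receivers == j])  # h with h -> j
  length(setdiff(intersect(sent_to_by_i, senders_to_j), c(i, j)))
}

# The first 8 dummy events from above, written out explicitly:
past <- data.frame(sender = c("A", "B", "C", "A", "D", "E", "F", "B"),
                   target = c("B", "C", "D", "E", "A", "F", "D", "A"),
                   stringsAsFactors = FALSE)
count_otp(past$sender, past$target, "A", "C")  # B lies on A -> B -> C, so 1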
> > #The results with and without the sliding windows are the same (see correlation > #below). Using the sliding windows method is recommended when the data are > #big' so that memory allotment is more efficient. > cor(eventSet$OTP , eventSet$OTP_SW) [1] 1 > > # Computing Outgoing Two Paths Statistics with the counts of events being returned > eventSet$OTPC <- computeOTP( + observed_time = events$time, + observed_sender = events$sender, + observed_receiver = events$target, + processed_time = eventSet$time, + processed_sender = eventSet$sender, + processed_receiver = eventSet$receiver, + halflife = 2, #halflife parameter + dyadic_weight = 0, + sliding_window = FALSE, + counts = TRUE, + Lerneretal_2013 = FALSE) > > cbind(eventSet$OTP, + eventSet$OTP_SW, + eventSet$OTPC) [,1] [,2] [,3] [1,] 0.00000000 0.00000000 0 [2,] 0.00000000 0.00000000 0 [3,] 0.00000000 0.00000000 0 [4,] 0.00000000 0.00000000 0 [5,] 0.59460356 0.59460356 1 [6,] 0.00000000 0.00000000 0 [7,] 0.00000000 0.00000000 0 [8,] 0.00000000 0.00000000 0 [9,] 0.00000000 0.00000000 0 [10,] 0.00000000 0.00000000 0 [11,] 0.35355339 0.35355339 1 [12,] 0.00000000 0.00000000 0 [13,] 0.00000000 0.00000000 0 [14,] 0.00000000 0.00000000 0 [15,] 0.50000000 0.50000000 1 [16,] 0.07432544 0.07432544 1 [17,] 0.00000000 0.00000000 0 [18,] 0.00000000 0.00000000 0 [19,] 0.29730178 0.29730178 1 [20,] 0.06250000 0.06250000 1 [21,] 0.21022410 0.21022410 1 [22,] 0.29730178 0.29730178 1 [23,] 0.00000000 0.00000000 0 [24,] 0.00000000 0.00000000 0 [25,] 0.06250000 0.06250000 1 [26,] 0.00000000 0.00000000 0 [27,] 0.06250000 0.06250000 1 [28,] 0.14865089 0.14865089 1 [29,] 0.00000000 0.00000000 0 [30,] 0.00000000 0.00000000 0 [31,] 0.00000000 0.00000000 0 [32,] 0.02668545 0.02668545 1 [33,] 0.00000000 0.00000000 0 [34,] 0.03173453 0.03173453 1 [35,] 0.00000000 0.00000000 0 > > > > cleanEx() > nameEx("computePersistence") > ### * computePersistence > > flush(stderr()); flush(stdout()) > > ### Name: computePersistence > ### Title: Compute Butts' (2008) Persistence Network Statistic for Event > ### Dyads in a Relational Event Sequence > ### Aliases: computePersistence > > ### ** Examples > > > > # A Dummy One-Mode Event Dataset > events <- data.frame(time = 1:18, + eventID = 1:18, + sender = c("A", "B", "C", + "A", "D", "E", + "F", "B", "A", + "F", "D", "B", + "G", "B", "D", + "H", "A", "D"), + target = c("B", "C", "D", + "E", "A", "F", + "D", "A", "C", + "G", "B", "C", + "H", "J", "A", + "F", "C", "B")) > > # Creating the Post-Processing Event Dataset with Null Events > eventSet <- processOMEventSeq(data = events, + time = events$time, + eventID = events$eventID, + sender = events$sender, + receiver = events$target, + p_samplingobserved = 1.00, + n_controls = 6, + seed = 9999) > > #Compute Persistence with respect to the sender's past relational history without > #the sliding windows framework and no temporal dependency > eventSet$persist <- computePersistence(observed_time = events$time, + observed_receiver = events$target, + observed_sender = events$sender, + processed_time = eventSet$time, + processed_receiver = eventSet$receiver, + processed_sender = eventSet$sender, + sender = TRUE, + nopastEvents = 0) Warning: `computePersistence()` was deprecated in dream 1.0.0. ℹ Please use `remstats_persistence()` instead. 
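# The processing step above pairs each sampled observed event with n_controls
# "null" events drawn from the risk set. A minimal sketch of that case-control
# idea, assuming the risk set is simply every ordered sender-receiver pair
# among the actors in the dummy `events` data (excluding self-ties and the
# observed dyad); processOMEventSeq() and its replacement create_riskset() may
# construct and sample the risk set differently, so this is an illustration
# only.
set.seed(9999)
actors  <- sort(unique(c(events$sender, events$target)))
riskset <- expand.grid(sender = actors, receiver = actors,
                       stringsAsFactors = FALSE)
riskset <- riskset[riskset$sender != riskset$receiver, ]  # drop self-ties

sample_controls <- function(obs_sender, obs_receiver, n_controls) {
  pool <- riskset[!(riskset$sender == obs_sender &
                      riskset$receiver == obs_receiver), ]
  pool[sample(nrow(pool), n_controls), ]
}

# Six candidate null events for the first observed event (A -> B):
sample_controls("A", "B", n_controls = 6)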
> > #Compute Persistence with respect to the sender's past relational history with > #the sliding windows framework and no temporal dependency > eventSet$persistSW <- computePersistence(observed_time = events$time, + observed_receiver = events$target, + observed_sender = events$sender, + processed_time = eventSet$time, + processed_receiver = eventSet$receiver, + processed_sender = eventSet$sender, + sender = TRUE, + sliding_windows = TRUE, + processed_seqIDs = eventSet$sequenceID, + nopastEvents = 0) Setting up data structure for the sliding windows framework....... > > #The results with and without the sliding windows are the same (see correlation > #below). Using the sliding windows method is recommended when the data are > #big' so that memory allotment is more efficient. > cor(eventSet$persist,eventSet$persistSW) [1] 1 > > > #Compute Persistence with respect to the sender's past relational history without > #the sliding windows framework and temporal dependency > eventSet$persistDep <- computePersistence(observed_time = events$time, + observed_receiver = events$target, + observed_sender = events$sender, + processed_time = eventSet$time, + processed_receiver = eventSet$receiver, + processed_sender = eventSet$sender, + sender = TRUE, + dependency = TRUE, + relationalTimeSpan = 5, #the past 5 events + nopastEvents = 0) > > #Compute Persistence with respect to the receiver's past relational history without > #the sliding windows framework and no temporal dependency > eventSet$persistT <- computePersistence(observed_time = events$time, + observed_receiver = events$target, + observed_sender = events$sender, + processed_time = eventSet$time, + processed_receiver = eventSet$receiver, + processed_sender = eventSet$sender, + sender = FALSE, + nopastEvents = 0) > > #Compute Persistence with respect to the receiver's past relational history with > #the sliding windows framework and no temporal dependency > eventSet$persistSWT <- computePersistence(observed_time = events$time, + observed_receiver = events$target, + observed_sender = events$sender, + processed_time = eventSet$time, + processed_receiver = eventSet$receiver, + processed_sender = eventSet$sender, + sender = FALSE, + sliding_windows = TRUE, + processed_seqIDs = eventSet$sequenceID, + nopastEvents = 0) Setting up data structure for the sliding windows framework....... > > #The results with and without the sliding windows are the same (see correlation > #below). Using the sliding windows method is recommended when the data are > #big' so that memory allotment is more efficient. 
> cor(eventSet$persistT,eventSet$persistSWT) [1] 1 > > > #Compute Persistence with respect to the receiver's past relational history without > #the sliding windows framework and temporal dependency > eventSet$persistDepT <- computePersistence(observed_time = events$time, + observed_receiver = events$target, + observed_sender = events$sender, + processed_time = eventSet$time, + processed_receiver = eventSet$receiver, + processed_sender = eventSet$sender, + sender = FALSE, + dependency = TRUE, + relationalTimeSpan = 5, #the past 5 events + nopastEvents = 0) > > > > > cleanEx() > nameEx("computePrefAttach") > ### * computePrefAttach > > flush(stderr()); flush(stdout()) > > ### Name: computePrefAttach > ### Title: Compute Butts' (2008) Preferential Attachment Network Statistic > ### for Event Dyads in a Relational Event Sequence > ### Aliases: computePrefAttach > > ### ** Examples > > > > # A Dummy One-Mode Event Dataset > events <- data.frame(time = 1:18, + eventID = 1:18, + sender = c("A", "B", "C", + "A", "D", "E", + "F", "B", "A", + "F", "D", "B", + "G", "B", "D", + "H", "A", "D"), + target = c("B", "C", "D", + "E", "A", "F", + "D", "A", "C", + "G", "B", "C", + "H", "J", "A", + "F", "C", "B")) > > # Creating the Post-Processing Event Dataset with Null Events > eventSet <- processOMEventSeq(data = events, + time = events$time, + eventID = events$eventID, + sender = events$sender, + receiver = events$target, + p_samplingobserved = 1.00, + n_controls = 6, + seed = 9999) > > # Compute Preferential Attachment Statistic without Sliding Windows Framework and > # No Temporal Dependency > eventSet$pref <- computePrefAttach(observed_time = events$time, + observed_receiver = events$target, + observed_sender = events$sender, + processed_time = eventSet$time, + processed_receiver = eventSet$receiver, + processed_sender = eventSet$sender, + dependency = FALSE) Warning: `computePrefAttach()` was deprecated in dream 1.0.0. ℹ Please use `remstats_prefattachment()` instead. > > # Compute Preferential Attachment Statistic with Sliding Windows Framework and > # No Temporal Dependency > eventSet$prefSW <- computePrefAttach(observed_time = events$time, + observed_receiver = events$target, + observed_sender = events$sender, + processed_time = eventSet$time, + processed_receiver = eventSet$receiver, + processed_sender = eventSet$sender, + dependency = FALSE, + sliding_windows = TRUE, + processed_seqIDs = eventSet$sequenceID) > > #The results with and without the sliding windows are the same (see correlation > #below). Using the sliding windows method is recommended when the data are > #big' so that memory allotment is more efficient. 
> cor(eventSet$pref,eventSet$prefSW) #the correlation of the values [1] 1 > > > # Compute Preferential Attachment Statistic without Sliding Windows Framework and > # Temporal Dependency > eventSet$prefdep <- computePrefAttach(observed_time = events$time, + observed_receiver = events$target, + observed_sender = events$sender, + processed_time = eventSet$time, + processed_receiver = eventSet$receiver, + processed_sender = eventSet$sender, + dependency = TRUE, + relationalTimeSpan = 10) > > # Compute Preferential Attachment Statistic with Sliding Windows Framework and > # Temporal Dependency > eventSet$pref1dep <- computePrefAttach(observed_time = events$time, + observed_receiver = events$target, + observed_sender = events$sender, + processed_time = eventSet$time, + processed_receiver = eventSet$receiver, + processed_sender = eventSet$sender, + dependency = TRUE, + relationalTimeSpan = 10, + sliding_windows = TRUE, + processed_seqIDs = eventSet$sequenceID) > > #The results with and without the sliding windows are the same (see correlation > #below). Using the sliding windows method is recommended when the data are > #big' so that memory allotment is more efficient. > cor(eventSet$prefdep,eventSet$pref1dep) #the correlation of the values [1] 1 > > > > > cleanEx() > nameEx("computeReceiverIndegree") > ### * computeReceiverIndegree > > flush(stderr()); flush(stdout()) > > ### Name: computeReceiverIndegree > ### Title: Compute the Indegree Network Statistic for Event Receivers in a > ### Relational Event Sequence > ### Aliases: computeReceiverIndegree > > ### ** Examples > > events <- data.frame(time = 1:18, + eventID = 1:18, + sender = c("A", "B", "C", + "A", "D", "E", + "F", "B", "A", + "F", "D", "B", + "G", "B", "D", + "H", "A", "D"), + target = c("B", "C", "D", + "E", "A", "F", + "D", "A", "C", + "G", "B", "C", + "H", "J", "A", + "F", "C", "B")) > > eventSet <- processOMEventSeq(data = events, + time = events$time, + eventID = events$eventID, + sender = events$sender, + receiver = events$target, + p_samplingobserved = 1.00, + n_controls = 1, + seed = 9999) > > # Computing Target Indegree Statistics without the sliding windows framework > eventSet$target_indegree <- computeReceiverIndegree( + observed_time = events$time, + observed_receiver = events$target, + processed_time = eventSet$time, + processed_receiver = eventSet$receiver, + halflife = 2, #halflife parameter + dyadic_weight = 0, + Lerneretal_2013 = FALSE) Warning: `computeReceiverIndegree()` was deprecated in dream 1.0.0. ℹ Please use `remstats_degree()` instead. > > # Computing Target Indegree Statistics with the sliding windows framework > eventSet$target_indegreeSW <- computeReceiverIndegree( + observed_time = events$time, + observed_receiver = events$target, + processed_time = eventSet$time, + processed_receiver = eventSet$receiver, + halflife = 2, #halflife parameter + processed_seqIDs = eventSet$sequenceID, + dyadic_weight = 0, + sliding_window = TRUE, + Lerneretal_2013 = FALSE) > > #The results with and without the sliding windows are the same (see correlation > #below). Using the sliding windows method is recommended when the data are > #big' so that memory allotment is more efficient. 
> cor(eventSet$target_indegree , eventSet$target_indegreeSW ) [1] 1 > > # Computing Target Indegree Statistics with the counts of events being returned > eventSet$target_indegreeC <- computeReceiverIndegree( + observed_time = events$time, + observed_receiver = events$target, + processed_time = eventSet$time, + processed_receiver = eventSet$receiver, + halflife = 2, #halflife parameter + processed_seqIDs = eventSet$sequenceID, + dyadic_weight = 0, + sliding_window = TRUE, + Lerneretal_2013 = FALSE, + counts = TRUE) > > cbind(eventSet$target_indegree, + eventSet$target_indegreeSW, + eventSet$target_indegreeC) [,1] [,2] [,3] [1,] 0.00000000 0.00000000 0 [2,] 0.00000000 0.00000000 0 [3,] 0.00000000 0.00000000 0 [4,] 0.00000000 0.00000000 0 [5,] 0.50000000 0.50000000 1 [6,] 0.00000000 0.00000000 0 [7,] 0.00000000 0.00000000 0 [8,] 0.00000000 0.00000000 0 [9,] 0.70710678 0.70710678 1 [10,] 0.00000000 0.00000000 0 [11,] 0.25000000 0.25000000 1 [12,] 0.25000000 0.25000000 1 [13,] 0.50000000 0.50000000 1 [14,] 0.35355339 0.35355339 1 [15,] 0.25000000 0.25000000 1 [16,] 0.08838835 0.08838835 1 [17,] 0.35355339 0.35355339 1 [18,] 0.00000000 0.00000000 0 [19,] 0.44194174 0.44194174 2 [20,] 0.03125000 0.03125000 1 [21,] 0.17677670 0.17677670 1 [22,] 0.38480339 0.38480339 2 [23,] 0.12500000 0.12500000 1 [24,] 0.00000000 0.00000000 0 [25,] 0.35355339 0.35355339 1 [26,] 0.00000000 0.00000000 0 [27,] 0.06250000 0.06250000 1 [28,] 0.11963835 0.11963835 2 [29,] 0.25781250 0.25781250 2 [30,] 0.03125000 0.03125000 1 [31,] 0.50000000 0.50000000 1 [32,] 0.24480097 0.24480097 3 [33,] 0.01104854 0.01104854 1 [34,] 0.09115048 0.09115048 2 [35,] 0.00781250 0.00781250 1 > > > > cleanEx() > nameEx("computeReceiverOutdegree") > ### * computeReceiverOutdegree > > flush(stderr()); flush(stdout()) > > ### Name: computeReceiverOutdegree > ### Title: Compute the Outdegree Network Statistic for Event Receivers in a > ### Relational Event Sequence > ### Aliases: computeReceiverOutdegree > > ### ** Examples > > events <- data.frame(time = 1:18, + eventID = 1:18, + sender = c("A", "B", "C", + "A", "D", "E", + "F", "B", "A", + "F", "D", "B", + "G", "B", "D", + "H", "A", "D"), + target = c("B", "C", "D", + "E", "A", "F", + "D", "A", "C", + "G", "B", "C", + "H", "J", "A", + "F", "C", "B")) > > eventSet <- processOMEventSeq(data = events, + time = events$time, + eventID = events$eventID, + sender = events$sender, + receiver = events$target, + p_samplingobserved = 1.00, + n_controls = 1, + seed = 9999) > > # Computing Target Outdegree Statistics without the sliding windows framework > eventSet$target_outdegree <- computeReceiverOutdegree( + observed_time = events$time, + observed_sender = events$sender, + observed_receiver = events$target, + processed_time = eventSet$time, + processed_sender = eventSet$sender, + processed_receiver = eventSet$receiver, + halflife = 2, #halflife parameter + dyadic_weight = 0, + Lerneretal_2013 = FALSE) Warning: `computeReceiverOutdegree()` was deprecated in dream 1.0.0. ℹ Please use `remstats_degree()` instead. 
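# The receiver outdegree above (like the receiver indegree earlier) is,
# roughly, a decayed count of past events, here events the candidate receiver
# has sent, with the counts = TRUE runs returning the corresponding raw
# counts. A sketch assuming a (1/2)^(dt / halflife) weight and using the dummy
# `events` data frame defined above; the package's normalisation (e.g., with
# Lerneretal_2013 = TRUE) can differ, so this is an illustration only.
decayed_outdegree <- function(now, node, past_times, past_senders,
                              halflife = 2) {
  keep <- past_times < now & past_senders == node
  sum((1/2)^((now - past_times[keep]) / halflife))
}

# By t = 10, "A" has sent at t = 1, 4, and 9, so its decayed outdegree is
# (1/2)^(9/2) + (1/2)^(6/2) + (1/2)^(1/2), about 0.876:
decayed_outdegree(10, "A",
                  past_times = events$time, past_senders = events$sender)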
> > # Computing Target Outdegree Statistics with the sliding windows framework > eventSet$target_outdegreeSW <- computeReceiverOutdegree( + observed_time = events$time, + observed_sender = events$sender, + observed_receiver = events$target, + processed_time = eventSet$time, + processed_sender = eventSet$sender, + processed_receiver = eventSet$receiver, + halflife = 2, #halflife parameter + processed_seqIDs = eventSet$sequenceID, + dyadic_weight = 0, + sliding_window = TRUE, + Lerneretal_2013 = FALSE) > > #The results with and without the sliding windows are the same (see correlation > #below). Using the sliding windows method is recommended when the data are > #big' so that memory allotment is more efficient. > cor(eventSet$target_outdegreeSW , eventSet$target_outdegree) [1] 1 > > # Computing Target Outdegree Statistic with the counts of events being returned > eventSet$target_outdegreeC <- computeReceiverOutdegree( + observed_time = events$time, + observed_sender = events$sender, + observed_receiver = events$target, + processed_time = eventSet$time, + processed_sender = eventSet$sender, + processed_receiver = eventSet$receiver, + halflife = 2, #halflife parameter + dyadic_weight = 0, + sliding_window = FALSE, + counts = TRUE, + Lerneretal_2013 = FALSE) > > cbind(eventSet$target_outdegree, + eventSet$target_outdegreeSW, + eventSet$target_outdegreeC) [,1] [,2] [,3] [1,] 0.000000000 0.000000000 0 [2,] 0.000000000 0.000000000 0 [3,] 0.000000000 0.000000000 0 [4,] 0.000000000 0.000000000 0 [5,] 0.500000000 0.500000000 1 [6,] 0.000000000 0.000000000 0 [7,] 0.000000000 0.000000000 0 [8,] 0.957106781 0.957106781 2 [9,] 0.000000000 0.000000000 0 [10,] 0.000000000 0.000000000 0 [11,] 0.676776695 0.676776695 2 [12,] 0.500000000 0.500000000 1 [13,] 0.176776695 0.176776695 1 [14,] 0.338388348 0.338388348 2 [15,] 0.353553391 0.353553391 1 [16,] 0.125000000 0.125000000 1 [17,] 0.500000000 0.500000000 1 [18,] 0.000000000 0.000000000 0 [19,] 0.353553391 0.353553391 1 [20,] 0.397747564 0.397747564 2 [21,] 0.176776695 0.176776695 1 [22,] 0.044194174 0.044194174 1 [23,] 0.676776695 0.676776695 2 [24,] 0.000000000 0.000000000 0 [25,] 0.562500000 0.562500000 2 [26,] 0.000000000 0.000000000 0 [27,] 0.062500000 0.062500000 1 [28,] 0.154909587 0.154909587 3 [29,] 0.000000000 0.000000000 0 [30,] 0.169194174 0.169194174 2 [31,] 0.905980563 0.905980563 3 [32,] 0.007812500 0.007812500 1 [33,] 0.250000000 0.250000000 1 [34,] 0.410156250 0.410156250 4 [35,] 0.005524272 0.005524272 1 > > > > cleanEx() > nameEx("computeRecency") > ### * computeRecency > > flush(stderr()); flush(stdout()) > > ### Name: computeRecency > ### Title: Compute Butts' (2008) Recency Network Statistic for Event Dyads > ### in a Relational Event Sequence > ### Aliases: computeRecency > > ### ** Examples > > > > # A Dummy One-Mode Event Dataset > events <- data.frame(time = 1:18, + eventID = 1:18, + sender = c("A", "B", "C", + "A", "D", "E", + "F", "B", "A", + "F", "D", "B", + "G", "B", "D", + "H", "A", "D"), + target = c("B", "C", "D", + "E", "A", "F", + "D", "A", "C", + "G", "B", "C", + "H", "J", "A", + "F", "C", "B")) > > # Creating the Post-Processing Event Dataset with Null Events > eventSet <- processOMEventSeq(data = events, + time = events$time, + eventID = events$eventID, + sender = events$sender, + receiver = events$target, + p_samplingobserved = 1.00, + n_controls = 6, + seed = 9999) > > # Compute Recency Statistic without Sliding Windows Framework and > # No Temporal Dependency > eventSet$recency_rawdiff <- computeRecency( + 
observed_time = events$time, + observed_receiver = events$target, + observed_sender = events$sender, + processed_time = eventSet$time, + processed_receiver = eventSet$receiver, + processed_sender = eventSet$sender, + type = "raw.diff", + dependency = FALSE, + i_neighborhood = TRUE, + nopastEvents = 0) Warning: `computeRecency()` was deprecated in dream 1.0.0. ℹ Please use `remstats_recency()` instead. > > # Compute Recency Statistic without Sliding Windows Framework and > # No Temporal Dependency > eventSet$recency_inv <- computeRecency( + observed_time = events$time, + observed_receiver = events$target, + observed_sender = events$sender, + processed_time = eventSet$time, + processed_receiver = eventSet$receiver, + processed_sender = eventSet$sender, + type = "inv.diff.plus1", + dependency = FALSE, + i_neighborhood = TRUE, + nopastEvents = 0) > > > # Compute Recency Statistic without Sliding Windows Framework and > # No Temporal Dependency > eventSet$recency_rank <- computeRecency( + observed_time = events$time, + observed_receiver = events$target, + observed_sender = events$sender, + processed_time = eventSet$time, + processed_receiver = eventSet$receiver, + processed_sender = eventSet$sender, + type = "rank.ordered.count", + dependency = FALSE, + i_neighborhood = TRUE, + nopastEvents = 0) > > # Compute Recency Statistic with Sliding Windows Framework and No Temporal Dependency > eventSet$recency_rawdiffSW <- computeRecency( + observed_time = events$time, + observed_receiver = events$target, + observed_sender = events$sender, + processed_time = eventSet$time, + processed_receiver = eventSet$receiver, + processed_sender = eventSet$sender, + type = "raw.diff", + dependency = FALSE, + i_neighborhood = TRUE, + sliding_windows = TRUE, + processed_seqIDs = eventSet$sequenceID, + nopastEvents = 0) > > > # Compute Recency Statistic with Sliding Windows Framework and No Temporal Dependency > eventSet$recency_invSW <- computeRecency( + observed_time = events$time, + observed_receiver = events$target, + observed_sender = events$sender, + processed_time = eventSet$time, + processed_receiver = eventSet$receiver, + processed_sender = eventSet$sender, + type = "inv.diff.plus1", + dependency = FALSE, + i_neighborhood = TRUE, + sliding_windows = TRUE, + processed_seqIDs = eventSet$sequenceID, + nopastEvents = 0) > > > # Compute Recency Statistic with Sliding Windows Framework and No Temporal Dependency > eventSet$recency_rankSW <- computeRecency( + observed_time = events$time, + observed_receiver = events$target, + observed_sender = events$sender, + processed_time = eventSet$time, + processed_receiver = eventSet$receiver, + processed_sender = eventSet$sender, + type = "rank.ordered.count", + dependency = FALSE, + i_neighborhood = TRUE, + sliding_windows = TRUE, + processed_seqIDs = eventSet$sequenceID, + nopastEvents = 0) > > > > > cleanEx() > nameEx("computeReciprocity") > ### * computeReciprocity > > flush(stderr()); flush(stdout()) > > ### Name: computeReciprocity > ### Title: Compute the Reciprocity Network Statistic for Event Dyads in a > ### Relational Event Sequence > ### Aliases: computeReciprocity > > ### ** Examples > > events <- data.frame(time = 1:18, eventID = 1:18, + sender = c("A", "B", "C", + "A", "D", "E", + "F", "B", "A", + "F", "D", "B", + "G", "B", "D", + "H", "A", "D"), + target = c("B", "C", "D", + "E", "A", "F", + "D", "A", "C", + "G", "B", "C", + "H", "J", "A", + "F", "C", "B")) > > eventSet <- processOMEventSeq(data = events, + time = events$time, + eventID = events$eventID, + 
sender = events$sender, + receiver = events$target, + p_samplingobserved = 1.00, + n_controls = 1, + seed = 9999) > > # Computing Reciprocity Statistics without the sliding windows framework > eventSet$recip <- computeReciprocity( + observed_time = events$time, + observed_sender = events$sender, + observed_receiver = events$target, + processed_time = eventSet$time, + processed_sender = eventSet$sender, + processed_receiver = eventSet$receiver, + halflife = 2, #halflife parameter + dyadic_weight = 0, + Lerneretal_2013 = FALSE) Warning: `computeReciprocity()` was deprecated in dream 1.0.0. ℹ Please use `remstats_reciprocity()` instead. > > # Computing Reciprocity Statistics with the sliding windows framework > eventSet$recipSW <- computeReciprocity( + observed_time = events$time, + observed_sender = events$sender, + observed_receiver = events$target, + processed_time = eventSet$time, + processed_sender = eventSet$sender, + processed_receiver = eventSet$receiver, + halflife = 2, #halflife parameter + processed_seqIDs = eventSet$sequenceID, + dyadic_weight = 0, + sliding_window = TRUE, + Lerneretal_2013 = FALSE) > > #The results with and without the sliding windows are the same (see correlation > #below). Using the sliding windows method is recommended when the data are > #big' so that memory allotment is more efficient. > cor(eventSet$recipSW , eventSet$recip) [1] 1 > > # Computing Reciprocity Statistics with the counts of events being returned > eventSet$recipC <- computeReciprocity( + observed_time = events$time, + observed_sender = events$sender, + observed_receiver = events$target, + processed_time = eventSet$time, + processed_sender = eventSet$sender, + processed_receiver = eventSet$receiver, + halflife = 2, #halflife parameter + dyadic_weight = 0, + sliding_window = FALSE, + counts = TRUE, + Lerneretal_2013 = FALSE) > > cbind(eventSet$recip, + eventSet$recipSW, + eventSet$recipC) [,1] [,2] [,3] [1,] 0.00000000 0.00000000 0 [2,] 0.00000000 0.00000000 0 [3,] 0.00000000 0.00000000 0 [4,] 0.00000000 0.00000000 0 [5,] 0.00000000 0.00000000 0 [6,] 0.00000000 0.00000000 0 [7,] 0.00000000 0.00000000 0 [8,] 0.00000000 0.00000000 0 [9,] 0.70710678 0.70710678 1 [10,] 0.00000000 0.00000000 0 [11,] 0.00000000 0.00000000 0 [12,] 0.00000000 0.00000000 0 [13,] 0.50000000 0.50000000 1 [14,] 0.08838835 0.08838835 1 [15,] 0.00000000 0.00000000 0 [16,] 0.00000000 0.00000000 0 [17,] 0.00000000 0.00000000 0 [18,] 0.00000000 0.00000000 0 [19,] 0.00000000 0.00000000 0 [20,] 0.00000000 0.00000000 0 [21,] 0.00000000 0.00000000 0 [22,] 0.00000000 0.00000000 0 [23,] 0.00000000 0.00000000 0 [24,] 0.00000000 0.00000000 0 [25,] 0.00000000 0.00000000 0 [26,] 0.00000000 0.00000000 0 [27,] 0.00000000 0.00000000 0 [28,] 0.00000000 0.00000000 0 [29,] 0.00000000 0.00000000 0 [30,] 0.00000000 0.00000000 0 [31,] 0.00000000 0.00000000 0 [32,] 0.00000000 0.00000000 0 [33,] 0.35355339 0.35355339 1 [34,] 0.00000000 0.00000000 0 [35,] 0.00000000 0.00000000 0 > > > > cleanEx() > nameEx("computeRemDyadCut") > ### * computeRemDyadCut > > flush(stderr()); flush(stdout()) > > ### Name: computeRemDyadCut > ### Title: A Helper Function to Assist Researchers in Finding Dyadic Weight > ### Cutoff Values > ### Aliases: computeRemDyadCut > > ### ** Examples > > #To replicate the example in the details section: > # with the Lerner et al. 2013 weighting function > computeRemDyadCut(halflife = 15, + relationalWidth = 30, + Lerneretal_2013 = TRUE) Warning: `computeRemDyadCut()` was deprecated in dream 1.0.0. 
ℹ Please use `remstats_dyadcut()` instead.
You are employing this function to find the corresponding dyadic cutoff value for temporal relevancy. The eventTime, relationalWidth, and halflife parameters must all be in the same measurement unit (e.g., hours, days). We hope you are providing the correct values...
[1] 0.01155245
> 
> # without the Lerner et al. 2013 weighting function
> computeRemDyadCut(halflife = 15,
+                   relationalWidth = 30,
+                   Lerneretal_2013 = FALSE)
You are employing this function to find the corresponding dyadic cutoff value for temporal relevancy. The eventTime, relationalWidth, and halflife parameters must all be in the same measurement unit (e.g., hours, days). We hope you are providing the correct values...
[1] 0.25
> 
> # A result to test the function (should come out to 0.50)
> computeRemDyadCut(halflife = 30,
+                   relationalWidth = 30,
+                   Lerneretal_2013 = FALSE)
You are employing this function to find the corresponding dyadic cutoff value for temporal relevancy. The eventTime, relationalWidth, and halflife parameters must all be in the same measurement unit (e.g., hours, days). We hope you are providing the correct values...
[1] 0.5
> 
> 
> # Replicating Lerner and Lomi (2020):
> #"We set T1/2 to 30 days so that an event counts as (close to) one in the very next instant of time,
> #it counts as 1/2 one month later, it counts as 1/4 two months after the event, and so on. To reduce
> #the memory consumption needed to store the network of past events, we set a dyadic weight to
> #zero if its value drops below 0.01. If a single event occurred in some dyad this would happen after
> #6.64×T1/2, that is after more than half a year." (Lerner and Lomi 2020: 104).
> 
> # Based upon Lerner and Lomi (2020: 104), the result should be around 0.01. Since the
> # time values in Lerner and Lomi (2020) are in milliseconds, we have to change
> # all measurements into milliseconds
> computeRemDyadCut(halflife = (30*24*60*60*1000), #30 days in milliseconds
+                   relationalWidth = (6.64*30*24*60*60*1000), #Based upon the paper
+                   #using the Lerner and Lomi (2020) weighting function
+                   Lerneretal_2013 = FALSE)
You are employing this function to find the corresponding dyadic cutoff value for temporal relevancy. The eventTime, relationalWidth, and halflife parameters must all be in the same measurement unit (e.g., hours, days). We hope you are providing the correct values...
[1] 0.01002676 > > > > > > cleanEx() > nameEx("computeRepetition") > ### * computeRepetition > > flush(stderr()); flush(stdout()) > > ### Name: computeRepetition > ### Title: Compute Butts' (2008) Repetition Network Statistic for Event > ### Dyads in a Relational Event Sequence > ### Aliases: computeRepetition > > ### ** Examples > > data("WikiEvent2018.first100k") > WikiEvent2018 <- WikiEvent2018.first100k[1:10000,] #the first ten thousand events > WikiEvent2018$time <- as.numeric(WikiEvent2018$time) #making the variable numeric > ### Creating the EventSet By Employing Case-Control Sampling With M = 5 and > ### Sampling from the Observed Event Sequence with P = 0.01 > EventSet <- processTMEventSeq( + data = WikiEvent2018, # The Event Dataset + time = WikiEvent2018$time, # The Time Variable + eventID = WikiEvent2018$eventID, # The Event Sequence Variable + sender = WikiEvent2018$user, # The Sender Variable + receiver = WikiEvent2018$article, # The Receiver Variable + p_samplingobserved = 0.01, # The Probability of Selection + n_controls = 5, # The Number of Controls to Sample from the Full Risk Set + seed = 9999) # The Seed for Replication > #### Estimating Repetition Scores Without the Sliding Windows Framework > EventSet$rep <- computeRepetition( + observed_time = WikiEvent2018$time, + observed_sender = WikiEvent2018$user, + observed_receiver = WikiEvent2018$article, + processed_time = EventSet$time, + processed_sender = EventSet$sender, + processed_receiver = EventSet$receiver, + halflife = 2.592e+09, #halflife parameter + dyadic_weight = 0, + Lerneretal_2013 = FALSE) Warning: `computeRepetition()` was deprecated in dream 1.0.0. ℹ Please use `remstats_repetition()` instead. > > EventSet$sw_rep <- computeRepetition( + observed_time = WikiEvent2018$time, + observed_sender = WikiEvent2018$user, + observed_receiver = WikiEvent2018$article, + processed_time = EventSet$time, + processed_sender = EventSet$sender, + processed_receiver = EventSet$receiver, + processed_seqIDs = EventSet$sequenceID, + halflife = 2.592e+09, #halflife parameter + dyadic_weight = 0, + sliding_window = TRUE, + Lerneretal_2013 = FALSE) > > #The results with and without the sliding windows are the same (see correlation > #below). Using the sliding windows method is recommended when the data are > #big' so that memory allotment is more efficient. 
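A note on the decay arithmetic before the correlation check below. The cutoff values printed in the computeRemDyadCut examples above, and the halflife used here, are consistent with an event's dyadic weight decaying as 2^(-t/halflife) after an elapsed time t, rescaled by log(2)/halflife under the Lerner et al. (2013) form. The following is a hand check inferred from those printed values, not the package's internal code:
2^(-30/15)                  # 0.25, the cutoff with halflife = 15 and relationalWidth = 30
(log(2)/15) * 2^(-30/15)    # 0.01155245, the same cutoff under the Lerner et al. (2013) form
2^(-6.64)                   # ~0.01, the Lerner and Lomi (2020) rule of thumb quoted above
30 * 24 * 60 * 60 * 1000    # 2.592e+09, the 30-day halflife in milliseconds used in this example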
> cor(EventSet$sw_rep, EventSet$rep) [1] 1 > > #### Estimating Repetition Scores with the Counts of Events Returned > EventSet$repC <- computeRepetition( + observed_time = WikiEvent2018$time, + observed_sender = WikiEvent2018$user, + observed_receiver = WikiEvent2018$article, + processed_time = EventSet$time, + processed_sender = EventSet$sender, + processed_receiver = EventSet$receiver, + halflife = 2.592e+09, #halflife parameter + dyadic_weight = 0, + Lerneretal_2013 = FALSE, + counts = TRUE) > > cbind(EventSet$rep, + EventSet$sw_rep, + EventSet$repC) [,1] [,2] [,3] [1,] 0.0000000 0.0000000 0 [2,] 0.0000000 0.0000000 0 [3,] 0.8685742 0.8685742 1 [4,] 0.0000000 0.0000000 0 [5,] 0.0000000 0.0000000 0 [6,] 0.0000000 0.0000000 0 [7,] 0.0000000 0.0000000 0 [8,] 0.0000000 0.0000000 0 [9,] 0.0000000 0.0000000 0 [10,] 0.0000000 0.0000000 0 [11,] 0.0000000 0.0000000 0 [12,] 0.0000000 0.0000000 0 [13,] 2.9974339 2.9974339 3 [14,] 0.0000000 0.0000000 0 [15,] 0.0000000 0.0000000 0 [16,] 0.0000000 0.0000000 0 [17,] 0.0000000 0.0000000 0 [18,] 0.0000000 0.0000000 0 [19,] 0.0000000 0.0000000 0 [20,] 0.0000000 0.0000000 0 [21,] 0.0000000 0.0000000 0 [22,] 0.0000000 0.0000000 0 [23,] 0.0000000 0.0000000 0 [24,] 0.0000000 0.0000000 0 [25,] 0.0000000 0.0000000 0 [26,] 0.0000000 0.0000000 0 [27,] 0.0000000 0.0000000 0 [28,] 0.0000000 0.0000000 0 [29,] 0.0000000 0.0000000 0 [30,] 0.0000000 0.0000000 0 [31,] 0.0000000 0.0000000 0 [32,] 0.0000000 0.0000000 0 [33,] 0.0000000 0.0000000 0 [34,] 0.0000000 0.0000000 0 [35,] 0.0000000 0.0000000 0 [36,] 0.0000000 0.0000000 0 [37,] 0.0000000 0.0000000 0 [38,] 0.0000000 0.0000000 0 [39,] 0.0000000 0.0000000 0 [40,] 0.0000000 0.0000000 0 [41,] 0.0000000 0.0000000 0 [42,] 0.0000000 0.0000000 0 [43,] 0.0000000 0.0000000 0 [44,] 0.0000000 0.0000000 0 [45,] 0.0000000 0.0000000 0 [46,] 0.0000000 0.0000000 0 [47,] 0.0000000 0.0000000 0 [48,] 0.0000000 0.0000000 0 [49,] 0.0000000 0.0000000 0 [50,] 0.0000000 0.0000000 0 [51,] 0.0000000 0.0000000 0 [52,] 0.0000000 0.0000000 0 [53,] 0.0000000 0.0000000 0 [54,] 0.0000000 0.0000000 0 [55,] 0.0000000 0.0000000 0 [56,] 0.0000000 0.0000000 0 [57,] 0.0000000 0.0000000 0 [58,] 0.3071704 0.3071704 1 [59,] 0.0000000 0.0000000 0 [60,] 0.0000000 0.0000000 0 [61,] 0.0000000 0.0000000 0 [62,] 0.0000000 0.0000000 0 [63,] 0.0000000 0.0000000 0 [64,] 0.0000000 0.0000000 0 [65,] 0.0000000 0.0000000 0 [66,] 0.0000000 0.0000000 0 [67,] 0.0000000 0.0000000 0 [68,] 0.0000000 0.0000000 0 [69,] 0.0000000 0.0000000 0 [70,] 0.0000000 0.0000000 0 [71,] 0.0000000 0.0000000 0 [72,] 0.0000000 0.0000000 0 [73,] 0.9999719 0.9999719 1 [74,] 0.0000000 0.0000000 0 [75,] 0.0000000 0.0000000 0 [76,] 0.0000000 0.0000000 0 [77,] 0.1054457 0.1054457 1 [78,] 0.0000000 0.0000000 0 [79,] 0.0000000 0.0000000 0 [80,] 0.0000000 0.0000000 0 [81,] 0.0000000 0.0000000 0 [82,] 0.0000000 0.0000000 0 [83,] 0.0000000 0.0000000 0 [84,] 0.0000000 0.0000000 0 [85,] 0.0000000 0.0000000 0 [86,] 0.0000000 0.0000000 0 [87,] 0.0000000 0.0000000 0 [88,] 0.0000000 0.0000000 0 [89,] 0.0000000 0.0000000 0 [90,] 0.0000000 0.0000000 0 [91,] 0.0000000 0.0000000 0 [92,] 0.0000000 0.0000000 0 [93,] 0.0000000 0.0000000 0 [94,] 0.0000000 0.0000000 0 [95,] 0.0000000 0.0000000 0 [96,] 0.0000000 0.0000000 0 [97,] 0.0000000 0.0000000 0 [98,] 0.0000000 0.0000000 0 [99,] 0.0000000 0.0000000 0 [100,] 0.0000000 0.0000000 0 [101,] 0.0000000 0.0000000 0 [102,] 0.0000000 0.0000000 0 [103,] 0.0000000 0.0000000 0 [104,] 0.0000000 0.0000000 0 [105,] 0.0000000 0.0000000 0 [106,] 0.0000000 0.0000000 0 [107,] 
0.0000000 0.0000000 0 [108,] 0.0000000 0.0000000 0 [109,] 0.0000000 0.0000000 0 [110,] 0.0000000 0.0000000 0 [111,] 0.0000000 0.0000000 0 [112,] 0.0000000 0.0000000 0 [113,] 0.0000000 0.0000000 0 [114,] 0.0000000 0.0000000 0 [115,] 0.9999968 0.9999968 1 [116,] 0.0000000 0.0000000 0 [117,] 0.0000000 0.0000000 0 [118,] 0.0000000 0.0000000 0 [119,] 0.0000000 0.0000000 0 [120,] 0.0000000 0.0000000 0 [121,] 0.0000000 0.0000000 0 [122,] 0.0000000 0.0000000 0 [123,] 0.0000000 0.0000000 0 [124,] 0.0000000 0.0000000 0 [125,] 0.0000000 0.0000000 0 [126,] 0.0000000 0.0000000 0 [127,] 0.0000000 0.0000000 0 [128,] 0.0000000 0.0000000 0 [129,] 0.0000000 0.0000000 0 [130,] 0.0000000 0.0000000 0 [131,] 0.0000000 0.0000000 0 [132,] 0.0000000 0.0000000 0 [133,] 0.0000000 0.0000000 0 [134,] 0.0000000 0.0000000 0 [135,] 0.0000000 0.0000000 0 [136,] 0.0000000 0.0000000 0 [137,] 0.0000000 0.0000000 0 [138,] 0.0000000 0.0000000 0 [139,] 0.0000000 0.0000000 0 [140,] 0.0000000 0.0000000 0 [141,] 0.0000000 0.0000000 0 [142,] 0.0000000 0.0000000 0 [143,] 0.0000000 0.0000000 0 [144,] 0.0000000 0.0000000 0 [145,] 0.0000000 0.0000000 0 [146,] 0.0000000 0.0000000 0 [147,] 0.0000000 0.0000000 0 [148,] 0.0000000 0.0000000 0 [149,] 0.0000000 0.0000000 0 [150,] 0.0000000 0.0000000 0 [151,] 0.7681050 0.7681050 1 [152,] 0.0000000 0.0000000 0 [153,] 0.0000000 0.0000000 0 [154,] 0.0000000 0.0000000 0 [155,] 0.0000000 0.0000000 0 [156,] 0.0000000 0.0000000 0 [157,] 0.0000000 0.0000000 0 [158,] 0.0000000 0.0000000 0 [159,] 0.0000000 0.0000000 0 [160,] 0.0000000 0.0000000 0 [161,] 0.0000000 0.0000000 0 [162,] 0.0000000 0.0000000 0 [163,] 0.0000000 0.0000000 0 [164,] 0.0000000 0.0000000 0 [165,] 0.0000000 0.0000000 0 [166,] 0.0000000 0.0000000 0 [167,] 0.0000000 0.0000000 0 [168,] 0.0000000 0.0000000 0 [169,] 0.0000000 0.0000000 0 [170,] 0.0000000 0.0000000 0 [171,] 0.0000000 0.0000000 0 [172,] 0.0000000 0.0000000 0 [173,] 0.0000000 0.0000000 0 [174,] 0.0000000 0.0000000 0 [175,] 0.0000000 0.0000000 0 [176,] 0.0000000 0.0000000 0 [177,] 0.0000000 0.0000000 0 [178,] 0.0000000 0.0000000 0 [179,] 0.0000000 0.0000000 0 [180,] 0.0000000 0.0000000 0 [181,] 0.0000000 0.0000000 0 [182,] 0.0000000 0.0000000 0 [183,] 0.0000000 0.0000000 0 [184,] 0.0000000 0.0000000 0 [185,] 0.0000000 0.0000000 0 [186,] 0.0000000 0.0000000 0 [187,] 1.9999874 1.9999874 2 [188,] 0.0000000 0.0000000 0 [189,] 0.0000000 0.0000000 0 [190,] 0.0000000 0.0000000 0 [191,] 0.0000000 0.0000000 0 [192,] 0.0000000 0.0000000 0 [193,] 0.0000000 0.0000000 0 [194,] 0.0000000 0.0000000 0 [195,] 0.0000000 0.0000000 0 [196,] 0.0000000 0.0000000 0 [197,] 0.0000000 0.0000000 0 [198,] 0.0000000 0.0000000 0 [199,] 0.0000000 0.0000000 0 [200,] 0.0000000 0.0000000 0 [201,] 0.0000000 0.0000000 0 [202,] 0.0000000 0.0000000 0 [203,] 0.0000000 0.0000000 0 [204,] 0.0000000 0.0000000 0 [205,] 0.0000000 0.0000000 0 [206,] 0.0000000 0.0000000 0 [207,] 0.0000000 0.0000000 0 [208,] 0.0000000 0.0000000 0 [209,] 0.0000000 0.0000000 0 [210,] 0.0000000 0.0000000 0 [211,] 0.0000000 0.0000000 0 [212,] 0.0000000 0.0000000 0 [213,] 0.0000000 0.0000000 0 [214,] 0.0000000 0.0000000 0 [215,] 0.0000000 0.0000000 0 [216,] 0.0000000 0.0000000 0 [217,] 0.0000000 0.0000000 0 [218,] 0.0000000 0.0000000 0 [219,] 0.0000000 0.0000000 0 [220,] 0.0000000 0.0000000 0 [221,] 0.0000000 0.0000000 0 [222,] 0.0000000 0.0000000 0 [223,] 0.0000000 0.0000000 0 [224,] 0.0000000 0.0000000 0 [225,] 0.0000000 0.0000000 0 [226,] 0.0000000 0.0000000 0 [227,] 0.0000000 0.0000000 0 [228,] 0.0000000 0.0000000 0 [229,] 0.0000000 
0.0000000 0 [230,] 0.0000000 0.0000000 0 [231,] 0.0000000 0.0000000 0 [232,] 0.0000000 0.0000000 0 [233,] 0.0000000 0.0000000 0 [234,] 0.0000000 0.0000000 0 [235,] 0.0000000 0.0000000 0 [236,] 0.0000000 0.0000000 0 [237,] 0.0000000 0.0000000 0 [238,] 0.0000000 0.0000000 0 [239,] 0.0000000 0.0000000 0 [240,] 0.0000000 0.0000000 0 [241,] 0.0000000 0.0000000 0 [242,] 0.0000000 0.0000000 0 [243,] 0.0000000 0.0000000 0 [244,] 0.0000000 0.0000000 0 [245,] 0.0000000 0.0000000 0 [246,] 0.0000000 0.0000000 0 [247,] 2.9314117 2.9314117 3 [248,] 0.0000000 0.0000000 0 [249,] 0.0000000 0.0000000 0 [250,] 0.0000000 0.0000000 0 [251,] 0.0000000 0.0000000 0 [252,] 0.0000000 0.0000000 0 [253,] 0.0000000 0.0000000 0 [254,] 0.0000000 0.0000000 0 [255,] 0.0000000 0.0000000 0 [256,] 0.0000000 0.0000000 0 [257,] 0.0000000 0.0000000 0 [258,] 0.0000000 0.0000000 0 [259,] 0.0000000 0.0000000 0 [260,] 0.0000000 0.0000000 0 [261,] 0.0000000 0.0000000 0 [262,] 0.0000000 0.0000000 0 [263,] 0.0000000 0.0000000 0 [264,] 0.0000000 0.0000000 0 [265,] 0.0000000 0.0000000 0 [266,] 0.0000000 0.0000000 0 [267,] 0.0000000 0.0000000 0 [268,] 0.0000000 0.0000000 0 [269,] 0.0000000 0.0000000 0 [270,] 0.0000000 0.0000000 0 [271,] 0.0000000 0.0000000 0 [272,] 0.0000000 0.0000000 0 [273,] 0.0000000 0.0000000 0 [274,] 0.0000000 0.0000000 0 [275,] 0.0000000 0.0000000 0 [276,] 0.0000000 0.0000000 0 [277,] 0.0000000 0.0000000 0 [278,] 0.0000000 0.0000000 0 [279,] 0.0000000 0.0000000 0 [280,] 0.0000000 0.0000000 0 [281,] 0.0000000 0.0000000 0 [282,] 0.0000000 0.0000000 0 [283,] 0.0000000 0.0000000 0 [284,] 0.0000000 0.0000000 0 [285,] 0.0000000 0.0000000 0 [286,] 0.0000000 0.0000000 0 [287,] 0.0000000 0.0000000 0 [288,] 0.0000000 0.0000000 0 [289,] 0.0000000 0.0000000 0 [290,] 0.0000000 0.0000000 0 [291,] 0.0000000 0.0000000 0 [292,] 0.0000000 0.0000000 0 [293,] 0.0000000 0.0000000 0 [294,] 0.0000000 0.0000000 0 [295,] 0.0000000 0.0000000 0 [296,] 0.0000000 0.0000000 0 [297,] 0.0000000 0.0000000 0 [298,] 0.0000000 0.0000000 0 [299,] 0.0000000 0.0000000 0 [300,] 0.0000000 0.0000000 0 [301,] 1.9999310 1.9999310 2 [302,] 0.0000000 0.0000000 0 [303,] 0.0000000 0.0000000 0 [304,] 0.0000000 0.0000000 0 [305,] 0.0000000 0.0000000 0 [306,] 0.0000000 0.0000000 0 [307,] 0.0000000 0.0000000 0 [308,] 0.0000000 0.0000000 0 [309,] 0.0000000 0.0000000 0 [310,] 0.0000000 0.0000000 0 [311,] 0.0000000 0.0000000 0 [312,] 0.0000000 0.0000000 0 [313,] 0.0000000 0.0000000 0 [314,] 0.0000000 0.0000000 0 [315,] 0.0000000 0.0000000 0 [316,] 0.0000000 0.0000000 0 [317,] 0.0000000 0.0000000 0 [318,] 0.0000000 0.0000000 0 [319,] 0.0000000 0.0000000 0 [320,] 0.0000000 0.0000000 0 [321,] 0.0000000 0.0000000 0 [322,] 0.0000000 0.0000000 0 [323,] 0.0000000 0.0000000 0 [324,] 0.0000000 0.0000000 0 [325,] 0.0000000 0.0000000 0 [326,] 0.0000000 0.0000000 0 [327,] 0.0000000 0.0000000 0 [328,] 0.0000000 0.0000000 0 [329,] 0.0000000 0.0000000 0 [330,] 0.0000000 0.0000000 0 [331,] 0.0000000 0.0000000 0 [332,] 0.0000000 0.0000000 0 [333,] 0.0000000 0.0000000 0 [334,] 0.0000000 0.0000000 0 [335,] 0.0000000 0.0000000 0 [336,] 0.0000000 0.0000000 0 [337,] 2.9999385 2.9999385 3 [338,] 0.0000000 0.0000000 0 [339,] 0.0000000 0.0000000 0 [340,] 0.0000000 0.0000000 0 [341,] 0.0000000 0.0000000 0 [342,] 0.0000000 0.0000000 0 [343,] 0.0000000 0.0000000 0 [344,] 0.0000000 0.0000000 0 [345,] 0.0000000 0.0000000 0 [346,] 0.0000000 0.0000000 0 [347,] 0.0000000 0.0000000 0 [348,] 0.0000000 0.0000000 0 [349,] 0.0000000 0.0000000 0 [350,] 0.0000000 0.0000000 0 [351,] 0.0000000 0.0000000 0 
[352,] 0.0000000 0.0000000 0 [353,] 0.0000000 0.0000000 0 [354,] 0.0000000 0.0000000 0 [355,] 0.0000000 0.0000000 0 [356,] 0.0000000 0.0000000 0 [357,] 0.0000000 0.0000000 0 [358,] 0.0000000 0.0000000 0 [359,] 0.0000000 0.0000000 0 [360,] 0.0000000 0.0000000 0 [361,] 0.0000000 0.0000000 0 [362,] 0.0000000 0.0000000 0 [363,] 0.0000000 0.0000000 0 [364,] 0.0000000 0.0000000 0 [365,] 0.0000000 0.0000000 0 [366,] 0.0000000 0.0000000 0 [367,] 0.0000000 0.0000000 0 [368,] 0.0000000 0.0000000 0 [369,] 0.0000000 0.0000000 0 [370,] 0.0000000 0.0000000 0 [371,] 0.0000000 0.0000000 0 [372,] 0.0000000 0.0000000 0 [373,] 0.0000000 0.0000000 0 [374,] 0.0000000 0.0000000 0 [375,] 0.0000000 0.0000000 0 [376,] 0.0000000 0.0000000 0 [377,] 0.0000000 0.0000000 0 [378,] 0.0000000 0.0000000 0 [379,] 0.0000000 0.0000000 0 [380,] 0.0000000 0.0000000 0 [381,] 0.0000000 0.0000000 0 [382,] 0.0000000 0.0000000 0 [383,] 0.0000000 0.0000000 0 [384,] 0.0000000 0.0000000 0 [385,] 0.0000000 0.0000000 0 [386,] 0.0000000 0.0000000 0 [387,] 0.0000000 0.0000000 0 [388,] 0.0000000 0.0000000 0 [389,] 0.0000000 0.0000000 0 [390,] 0.0000000 0.0000000 0 [391,] 0.0000000 0.0000000 0 [392,] 0.0000000 0.0000000 0 [393,] 0.0000000 0.0000000 0 [394,] 0.0000000 0.0000000 0 [395,] 0.0000000 0.0000000 0 [396,] 0.0000000 0.0000000 0 [397,] 0.0000000 0.0000000 0 [398,] 0.0000000 0.0000000 0 [399,] 0.0000000 0.0000000 0 [400,] 0.0000000 0.0000000 0 [401,] 0.0000000 0.0000000 0 [402,] 0.0000000 0.0000000 0 [403,] 0.0000000 0.0000000 0 [404,] 0.0000000 0.0000000 0 [405,] 0.0000000 0.0000000 0 [406,] 0.0000000 0.0000000 0 [407,] 0.0000000 0.0000000 0 [408,] 0.0000000 0.0000000 0 [409,] 0.0000000 0.0000000 0 [410,] 0.0000000 0.0000000 0 [411,] 0.0000000 0.0000000 0 [412,] 0.0000000 0.0000000 0 [413,] 0.0000000 0.0000000 0 [414,] 0.0000000 0.0000000 0 [415,] 0.0000000 0.0000000 0 [416,] 0.0000000 0.0000000 0 [417,] 0.0000000 0.0000000 0 [418,] 0.0000000 0.0000000 0 [419,] 0.0000000 0.0000000 0 [420,] 0.0000000 0.0000000 0 [421,] 0.0000000 0.0000000 0 [422,] 0.0000000 0.0000000 0 [423,] 0.0000000 0.0000000 0 [424,] 0.0000000 0.0000000 0 [425,] 0.0000000 0.0000000 0 [426,] 0.0000000 0.0000000 0 [427,] 0.0000000 0.0000000 0 [428,] 0.0000000 0.0000000 0 [429,] 0.0000000 0.0000000 0 [430,] 0.0000000 0.0000000 0 [431,] 0.0000000 0.0000000 0 [432,] 0.0000000 0.0000000 0 [433,] 0.0000000 0.0000000 0 [434,] 0.0000000 0.0000000 0 [435,] 0.0000000 0.0000000 0 [436,] 0.0000000 0.0000000 0 [437,] 0.0000000 0.0000000 0 [438,] 0.0000000 0.0000000 0 [439,] 0.0000000 0.0000000 0 [440,] 0.0000000 0.0000000 0 [441,] 0.0000000 0.0000000 0 [442,] 0.0000000 0.0000000 0 [443,] 0.0000000 0.0000000 0 [444,] 0.0000000 0.0000000 0 [445,] 0.0000000 0.0000000 0 [446,] 0.0000000 0.0000000 0 [447,] 0.0000000 0.0000000 0 [448,] 0.0000000 0.0000000 0 [449,] 0.0000000 0.0000000 0 [450,] 0.0000000 0.0000000 0 [451,] 0.0000000 0.0000000 0 [452,] 0.0000000 0.0000000 0 [453,] 0.0000000 0.0000000 0 [454,] 0.0000000 0.0000000 0 [455,] 0.0000000 0.0000000 0 [456,] 0.0000000 0.0000000 0 [457,] 0.0000000 0.0000000 0 [458,] 0.0000000 0.0000000 0 [459,] 0.0000000 0.0000000 0 [460,] 0.0000000 0.0000000 0 [461,] 0.0000000 0.0000000 0 [462,] 0.0000000 0.0000000 0 [463,] 0.0000000 0.0000000 0 [464,] 0.0000000 0.0000000 0 [465,] 0.0000000 0.0000000 0 [466,] 0.0000000 0.0000000 0 [467,] 0.0000000 0.0000000 0 [468,] 0.0000000 0.0000000 0 [469,] 0.0000000 0.0000000 0 [470,] 0.0000000 0.0000000 0 [471,] 0.0000000 0.0000000 0 [472,] 0.0000000 0.0000000 0 [473,] 0.0000000 0.0000000 0 [474,] 0.0000000 
0.0000000 0 [475,] 0.0000000 0.0000000 0 [476,] 0.0000000 0.0000000 0 [477,] 0.0000000 0.0000000 0 [478,] 0.0000000 0.0000000 0 [479,] 0.0000000 0.0000000 0 [480,] 0.0000000 0.0000000 0 [481,] 0.0000000 0.0000000 0 [482,] 0.0000000 0.0000000 0 [483,] 0.0000000 0.0000000 0 [484,] 0.0000000 0.0000000 0 [485,] 0.0000000 0.0000000 0 [486,] 0.0000000 0.0000000 0 [487,] 0.9999901 0.9999901 1 [488,] 0.0000000 0.0000000 0 [489,] 0.0000000 0.0000000 0 [490,] 0.0000000 0.0000000 0 [491,] 0.0000000 0.0000000 0 [492,] 0.0000000 0.0000000 0 [493,] 0.0000000 0.0000000 0 [494,] 0.0000000 0.0000000 0 [495,] 0.0000000 0.0000000 0 [496,] 0.0000000 0.0000000 0 [497,] 0.0000000 0.0000000 0 [498,] 0.0000000 0.0000000 0 [499,] 0.9999834 0.9999834 1 [500,] 0.0000000 0.0000000 0 [501,] 0.0000000 0.0000000 0 [502,] 0.0000000 0.0000000 0 [503,] 0.0000000 0.0000000 0 [504,] 0.0000000 0.0000000 0 [505,] 0.0000000 0.0000000 0 [506,] 0.0000000 0.0000000 0 [507,] 0.0000000 0.0000000 0 [508,] 0.0000000 0.0000000 0 [509,] 0.0000000 0.0000000 0 [510,] 0.0000000 0.0000000 0 [511,] 0.0000000 0.0000000 0 [512,] 0.0000000 0.0000000 0 [513,] 0.0000000 0.0000000 0 [514,] 0.0000000 0.0000000 0 [515,] 0.0000000 0.0000000 0 [516,] 0.0000000 0.0000000 0 [517,] 0.0000000 0.0000000 0 [518,] 0.0000000 0.0000000 0 [519,] 0.0000000 0.0000000 0 [520,] 0.0000000 0.0000000 0 [521,] 0.0000000 0.0000000 0 [522,] 0.0000000 0.0000000 0 [523,] 0.0000000 0.0000000 0 [524,] 0.0000000 0.0000000 0 [525,] 0.0000000 0.0000000 0 [526,] 0.0000000 0.0000000 0 [527,] 0.0000000 0.0000000 0 [528,] 0.0000000 0.0000000 0 [529,] 0.0000000 0.0000000 0 [530,] 0.0000000 0.0000000 0 [531,] 0.0000000 0.0000000 0 [532,] 0.0000000 0.0000000 0 [533,] 0.0000000 0.0000000 0 [534,] 0.0000000 0.0000000 0 [535,] 0.0000000 0.0000000 0 [536,] 0.0000000 0.0000000 0 [537,] 0.0000000 0.0000000 0 [538,] 0.0000000 0.0000000 0 [539,] 0.0000000 0.0000000 0 [540,] 0.0000000 0.0000000 0 [541,] 0.0000000 0.0000000 0 [542,] 0.0000000 0.0000000 0 [543,] 0.0000000 0.0000000 0 [544,] 0.0000000 0.0000000 0 [545,] 0.0000000 0.0000000 0 [546,] 0.0000000 0.0000000 0 [547,] 0.0000000 0.0000000 0 [548,] 0.0000000 0.0000000 0 [549,] 0.0000000 0.0000000 0 [550,] 0.0000000 0.0000000 0 [551,] 0.0000000 0.0000000 0 [552,] 0.0000000 0.0000000 0 [553,] 0.9999251 0.9999251 1 [554,] 0.0000000 0.0000000 0 [555,] 0.0000000 0.0000000 0 [556,] 0.0000000 0.0000000 0 [557,] 0.0000000 0.0000000 0 [558,] 0.0000000 0.0000000 0 [559,] 0.9714083 0.9714083 1 [560,] 0.0000000 0.0000000 0 [561,] 0.0000000 0.0000000 0 [562,] 0.0000000 0.0000000 0 [563,] 0.0000000 0.0000000 0 [564,] 0.0000000 0.0000000 0 [565,] 0.0000000 0.0000000 0 [566,] 0.0000000 0.0000000 0 [567,] 0.0000000 0.0000000 0 [568,] 0.0000000 0.0000000 0 [569,] 0.0000000 0.0000000 0 [570,] 0.0000000 0.0000000 0 [571,] 2.9214995 2.9214995 3 [572,] 0.0000000 0.0000000 0 [573,] 0.0000000 0.0000000 0 [574,] 0.0000000 0.0000000 0 [575,] 0.0000000 0.0000000 0 [576,] 0.0000000 0.0000000 0 [577,] 0.0000000 0.0000000 0 [578,] 0.0000000 0.0000000 0 [579,] 0.0000000 0.0000000 0 [580,] 0.0000000 0.0000000 0 [581,] 0.0000000 0.0000000 0 [582,] 0.0000000 0.0000000 0 [583,] 0.0000000 0.0000000 0 [584,] 0.0000000 0.0000000 0 [585,] 0.0000000 0.0000000 0 [586,] 0.0000000 0.0000000 0 [587,] 0.0000000 0.0000000 0 [588,] 0.0000000 0.0000000 0 [589,] 0.0000000 0.0000000 0 [590,] 0.0000000 0.0000000 0 [591,] 0.0000000 0.0000000 0 [592,] 0.0000000 0.0000000 0 [593,] 0.0000000 0.0000000 0 [594,] 0.0000000 0.0000000 0 [595,] 0.8023685 0.8023685 1 [596,] 0.0000000 0.0000000 0 
[597,] 0.0000000 0.0000000 0 [598,] 0.0000000 0.0000000 0 [599,] 0.0000000 0.0000000 0 [600,] 0.0000000 0.0000000 0 > > > > > > cleanEx() > nameEx("computeSenderIndegree") > ### * computeSenderIndegree > > flush(stderr()); flush(stdout()) > > ### Name: computeSenderIndegree > ### Title: Compute the Indegree Network Statistic for Event Senders in a > ### Relational Event Sequence > ### Aliases: computeSenderIndegree > > ### ** Examples > > events <- data.frame(time = 1:18, + eventID = 1:18, + sender = c("A", "B", "C", + "A", "D", "E", + "F", "B", "A", + "F", "D", "B", + "G", "B", "D", + "H", "A", "D"), + target = c("B", "C", "D", + "E", "A", "F", + "D", "A", "C", + "G", "B", "C", + "H", "J", "A", + "F", "C", "B")) > > eventSet <- processOMEventSeq(data = events, + time = events$time, + eventID = events$eventID, + sender = events$sender, + receiver = events$target, + p_samplingobserved = 1.00, + n_controls = 1, + seed = 9999) > > # Computing Sender Indegree Statistics without the sliding windows framework > eventSet$sender.indegree <- computeSenderIndegree( + observed_time = events$time, + observed_sender = events$sender, + observed_receiver = events$target, + processed_time = eventSet$time, + processed_sender = eventSet$sender, + processed_receiver = eventSet$receiver, + halflife = 2, #halflife parameter + dyadic_weight = 0, + Lerneretal_2013 = FALSE) Warning: `computeSenderIndegree()` was deprecated in dream 1.0.0. ℹ Please use `remstats_degree()` instead. > > # Computing Sender Indegree Statistics with the sliding windows framework > eventSet$sender.indegree.SW <- computeSenderIndegree( + observed_time = events$time, + observed_sender = events$sender, + observed_receiver = events$target, + processed_time = eventSet$time, + processed_sender = eventSet$sender, + processed_receiver = eventSet$receiver, + halflife = 2, #halflife parameter + processed_seqIDs = eventSet$sequenceID, + dyadic_weight = 0, + sliding_window = TRUE, + Lerneretal_2013 = FALSE) > > #The results with and without the sliding windows are the same (see correlation > #below). Using the sliding windows method is recommended when the data are > #big' so that memory allotment is more efficient. 
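Before the correlation check below, a note on how the weighted statistic relates to the raw counts returned with counts = TRUE: under the same decay form sketched earlier, each qualifying past event contributes 2^(-Δt/halflife), so entries with a count of 2 in the cbind output below are sums of two decayed contributions. With halflife = 2 and the integer event times used here (a hand check, not package code):
2^(-1/2)              # 0.7071068, a single prior in-tie one time unit back
2^(-1/2) + 2^(-4/2)   # 0.9571068, two prior in-ties at lags 1 and 4 (compare row 16 below)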
> cor(eventSet$sender.indegree.SW,eventSet$sender.indegree) [1] 1 > > # Computing Sender Indegree Statistics with the counts of events being returned > eventSet$sender.indegreeC <- computeSenderIndegree( + observed_time = events$time, + observed_sender = events$sender, + observed_receiver = events$target, + processed_time = eventSet$time, + processed_sender = eventSet$sender, + processed_receiver = eventSet$receiver, + halflife = 2, #halflife parameter + dyadic_weight = 0, + Lerneretal_2013 = FALSE, + counts = TRUE) > > cbind(eventSet$sender.indegree.SW, + eventSet$sender.indegree, + eventSet$sender.indegreeC) [,1] [,2] [,3] [1,] 0.00000000 0.00000000 0 [2,] 0.70710678 0.70710678 1 [3,] 0.00000000 0.00000000 0 [4,] 0.70710678 0.70710678 1 [5,] 0.00000000 0.00000000 0 [6,] 0.00000000 0.00000000 0 [7,] 0.70710678 0.70710678 1 [8,] 0.50000000 0.50000000 1 [9,] 0.25000000 0.25000000 1 [10,] 0.50000000 0.50000000 1 [11,] 0.50000000 0.50000000 1 [12,] 0.70710678 0.70710678 1 [13,] 0.35355339 0.35355339 1 [14,] 0.08838835 0.08838835 1 [15,] 0.25000000 0.25000000 1 [16,] 0.95710678 0.95710678 2 [17,] 0.17677670 0.17677670 1 [18,] 0.25000000 0.25000000 1 [19,] 0.12500000 0.12500000 1 [20,] 0.31250000 0.31250000 2 [21,] 0.70710678 0.70710678 1 [22,] 0.72920387 0.72920387 2 [23,] 0.50000000 0.50000000 1 [24,] 0.35355339 0.35355339 1 [25,] 0.35355339 0.35355339 1 [26,] 0.36460193 0.36460193 2 [27,] 0.11048543 0.11048543 2 [28,] 0.07812500 0.07812500 2 [29,] 0.50000000 0.50000000 1 [30,] 0.35355339 0.35355339 1 [31,] 0.50000000 0.50000000 1 [32,] 0.55981917 0.55981917 3 [33,] 0.72920387 0.72920387 2 [34,] 0.02762136 0.02762136 2 [35,] 0.02762136 0.02762136 2 > > > > cleanEx() > nameEx("computeSenderOutdegree") > ### * computeSenderOutdegree > > flush(stderr()); flush(stdout()) > > ### Name: computeSenderOutdegree > ### Title: Compute the Outdegree Network Statistic for Event Senders in a > ### Relational Event Sequence > ### Aliases: computeSenderOutdegree > > ### ** Examples > > events <- data.frame(time = 1:18, + eventID = 1:18, + sender = c("A", "B", "C", + "A", "D", "E", + "F", "B", "A", + "F", "D", "B", + "G", "B", "D", + "H", "A", "D"), + target = c("B", "C", "D", + "E", "A", "F", + "D", "A", "C", + "G", "B", "C", + "H", "J", "A", + "F", "C", "B")) > > eventSet <- processOMEventSeq(data = events, + time = events$time, + eventID = events$eventID, + sender = events$sender, + receiver = events$target, + p_samplingobserved = 1.00, + n_controls = 1, + seed = 9999) > > # Computing Sender Outdegree Statistics without the sliding windows framework > eventSet$sender_outdegree <- computeSenderOutdegree( + observed_time = events$time, + observed_sender = events$sender, + processed_time = eventSet$time, + processed_sender = eventSet$sender, + halflife = 2, #halflife parameter + dyadic_weight = 0, + Lerneretal_2013 = FALSE) Warning: `computeSenderOutdegree()` was deprecated in dream 1.0.0. ℹ Please use `remstats_degree()` instead. > > # Computing Sender Outdegree Statistics with the sliding windows framework > eventSet$sender_outdegreeSW <- computeSenderOutdegree( + observed_time = events$time, + observed_sender = events$sender, + processed_time = eventSet$time, + processed_sender = eventSet$sender, + halflife = 2, #halflife parameter + processed_seqIDs = eventSet$sequenceID, + dyadic_weight = 0, + sliding_window = TRUE, + Lerneretal_2013 = FALSE) > > #The results with and without the sliding windows are the same (see correlation > #below). 
Using the sliding windows method is recommended when the data are > #big' so that memory allotment is more efficient. > cor(eventSet$sender_outdegreeSW , eventSet$sender_outdegree) [1] 1 > > # Computing Sender Outdegree Statistic with the counts of events being returned > eventSet$sender_outdegreeC <- computeSenderOutdegree( + observed_time = events$time, + observed_sender = events$sender, + processed_time = eventSet$time, + processed_sender = eventSet$sender, + halflife = 2, #halflife parameter + dyadic_weight = 0, + sliding_window = FALSE, + counts = TRUE, + Lerneretal_2013 = FALSE) > > cbind(eventSet$sender_outdegree, + eventSet$sender_outdegreeSW, + eventSet$sender_outdegreeC) [,1] [,2] [,3] [1,] 0.00000000 0.00000000 0 [2,] 0.00000000 0.00000000 0 [3,] 0.00000000 0.00000000 0 [4,] 0.00000000 0.00000000 0 [5,] 0.70710678 0.70710678 1 [6,] 0.35355339 0.35355339 1 [7,] 0.35355339 0.35355339 1 [8,] 0.00000000 0.00000000 0 [9,] 0.35355339 0.35355339 1 [10,] 0.00000000 0.00000000 0 [11,] 0.70710678 0.70710678 1 [12,] 0.00000000 0.00000000 0 [13,] 0.25000000 0.25000000 1 [14,] 0.12500000 0.12500000 1 [15,] 0.17677670 0.17677670 1 [16,] 0.23927670 0.23927670 2 [17,] 0.79549513 0.79549513 2 [18,] 0.35355339 0.35355339 1 [19,] 0.25000000 0.25000000 1 [20,] 0.12500000 0.12500000 1 [21,] 0.17677670 0.17677670 1 [22,] 0.28125000 0.28125000 2 [23,] 0.12500000 0.12500000 1 [24,] 0.00000000 0.00000000 0 [25,] 0.03125000 0.03125000 1 [26,] 0.64062500 0.64062500 3 [27,] 0.64062500 0.64062500 3 [28,] 0.28125000 0.28125000 2 [29,] 0.01562500 0.01562500 1 [30,] 0.00000000 0.00000000 0 [31,] 0.82031250 0.82031250 4 [32,] 0.07745479 0.07745479 3 [33,] 0.11963835 0.11963835 2 [34,] 0.45299028 0.45299028 3 [35,] 0.41015625 0.41015625 4 > > > > cleanEx() > nameEx("computeTMDegree") > ### * computeTMDegree > > flush(stderr()); flush(stdout()) > > ### Name: computeTMDegree > ### Title: Compute Degree Centrality Values for Two-Mode Networks > ### Aliases: computeTMDegree > > ### ** Examples > > #Replicating the biparitate graph presented in Knoke and Yang (2020: 109) > knoke_yang_PC <- matrix(c(1,1,0,0, 1,1,0,0, + 1,1,1,0, 0,0,1,1, + 0,0,1,1), byrow = TRUE, + nrow = 5, ncol = 4) > colnames(knoke_yang_PC) <- c("Rubio-R","McConnell-R", "Reid-D", "Sanders-D") > rownames(knoke_yang_PC) <- c("UPS", "MS", "HD", "SEU", "ANA") > computeTMDegree(knoke_yang_PC, level1 = TRUE) #this value matches the book Warning: `computeTMDegree()` was deprecated in dream 1.0.0. ℹ Please use `netstats_tm_degreecent()` instead. UPS MS HD SEU ANA 2 2 4 2 2 > computeTMDegree(knoke_yang_PC, level1 = FALSE) #this value matches the book Rubio-R McConnell-R Reid-D Sanders-D 2 2 3 1 > > > > cleanEx() > nameEx("computeTMDens") > ### * computeTMDens > > flush(stderr()); flush(stdout()) > > ### Name: computeTMDens > ### Title: Compute Level-Specific Graph Density for Two-Mode Networks > ### Aliases: computeTMDens > > ### ** Examples > > #Replicating the biparitate graph presented in Knoke and Yang (2020: 109) > knoke_yang_PC <- matrix(c(1,1,0,0, 1,1,0,0, + 1,1,1,0, 0,0,1,1, + 0,0,1,1), byrow = TRUE, + nrow = 5, ncol = 4) > colnames(knoke_yang_PC) <- c("Rubio-R","McConnell-R", "Reid-D", "Sanders-D") > rownames(knoke_yang_PC) <- c("UPS", "MS", "HD", "SEU", "ANA") > #compute two-mode density for level 1 > #note: this value does not match that of Knoke and Yang (which we believe > #is a typo in that book), but does match that of Wasserman and > #Faust (1995: 317) for the ceo dataset. 
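The two density calls follow below. As a cross-check, both the degree centralities printed above and the density values below can be reproduced from one-mode projections of the bipartite matrix. This is a sketch consistent with those outputs, not a claim about the package's internal code:
proj1 <- knoke_yang_PC %*% t(knoke_yang_PC)       # organization-by-organization overlaps
proj2 <- t(knoke_yang_PC) %*% knoke_yang_PC       # senator-by-senator overlaps
diag(proj1) <- 0; diag(proj2) <- 0                # ignore self-overlaps
rowSums(proj1 > 0)                                # 2 2 4 2 2, the level-1 degrees above
rowSums(proj2 > 0)                                # 2 2 3 1, the level-2 degrees above
sum(proj1) / 2 / choose(nrow(knoke_yang_PC), 2)   # 1, the level-1 density below
sum(proj2) / 2 / choose(ncol(knoke_yang_PC), 2)   # 1.166667, the level-2 density below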
> computeTMDens(knoke_yang_PC, level1 = TRUE) Warning: `computeTMDens()` was deprecated in dream 1.0.0. ℹ Please use `netstats_tm_density()` instead. [1] 1 > #compute two-mode density for level 2. > #note: this value matches that of the book > computeTMDens(knoke_yang_PC, level1 = FALSE) [1] 1.166667 > > > > > cleanEx() > nameEx("computeTMEgoDis") > ### * computeTMEgoDis > > flush(stderr()); flush(stdout()) > > ### Name: computeTMEgoDis > ### Title: Compute Fujimoto, Snijders, and Valente's (2018) Ego Homophily > ### Distance for Two-Mode Networks > ### Aliases: computeTMEgoDis > > ### ** Examples > > > # For this example, we use the Davis Southern Women's Dataset. > data("southern.women") > #creating a random binary membership vector > set.seed(9999) > membership <- sample(0:1, nrow(southern.women), replace = TRUE) > #the ego 2 mode distance non-standardized > computeTMEgoDis(southern.women, mem = membership) Warning: `computeTMEgoDis()` was deprecated in dream 1.0.0. ℹ Please use `netstats_tm_egodistance()` instead. [1] 3.846553 3.745543 2.611011 2.247375 1.796825 2.301099 2.345543 1.875125 [9] 2.176712 1.449106 2.260839 1.871329 3.371950 3.470851 2.618803 2.260839 [17] 1.030303 1.030303 > #the ego 2 mode distance standardized > computeTMEgoDis(southern.women, mem = membership, standardize = TRUE) EVELYN LAURA THERESA BRENDA CHARLOTTE FRANCES ELEANOR PEARL 0.4808192 0.5350776 0.3263764 0.3210535 0.4492063 0.5752747 0.5863858 0.6250416 RUTH VERNE MYRA KATHERINE SYLVIA NORA HELEN DOROTHY 0.5441780 0.3622766 0.5652098 0.3118881 0.4817072 0.4338564 0.3741148 0.5652098 OLIVIA FLORA 0.5151515 0.5151515 > > > > > cleanEx() > nameEx("computeTriads") > ### * computeTriads > > flush(stderr()); flush(stdout()) > > ### Name: computeTriads > ### Title: Compute the Triadic Closure Network Statistic for Event Dyads in > ### a Relational Event Sequence > ### Aliases: computeTriads > > ### ** Examples > > events <- data.frame(time = 1:18, + eventID = 1:18, + sender = c("A", "B", "C", + "A", "D", "E", + "F", "B", "A", + "F", "D", "B", + "G", "B", "D", + "H", "A", "D"), + target = c("B", "C", "D", + "E", "A", "F", + "D", "A", "C", + "G", "B", "C", + "H", "J", "A", + "F", "C", "B")) > > eventSet <- processOMEventSeq(data = events, + time = events$time, + eventID = events$eventID, + sender = events$sender, + receiver = events$target, + p_samplingobserved = 1.00, + n_controls = 1, + seed = 9999) > > # Computing Triadic Statistics without the sliding windows framework > eventSet$triadic <- computeTriads( + observed_time = events$time, + observed_sender = events$sender, + observed_receiver = events$target, + processed_time = eventSet$time, + processed_sender = eventSet$sender, + processed_receiver = eventSet$receiver, + halflife = 2, #halflife parameter + dyadic_weight = 0, + Lerneretal_2013 = FALSE) Warning: `computeTriads()` was deprecated in dream 1.0.0. ℹ Please use `remstats_triads()` instead. > > # Computing Triadic Statistics with the sliding windows framework > eventSet$triadicSW <- computeTriads( + observed_time = events$time, + observed_sender = events$sender, + observed_receiver = events$target, + processed_time = eventSet$time, + processed_sender = eventSet$sender, + processed_receiver = eventSet$receiver, + halflife = 2, #halflife parameter + processed_seqIDs = eventSet$sequenceID, + dyadic_weight = 0, + sliding_window = TRUE, + Lerneretal_2013 = FALSE) Setting up data structure for the sliding windows framework....... 
> > #The results with and without the sliding windows are the same (see correlation > #below). Using the sliding windows method is recommended when the data are > #big' so that memory allotment is more efficient. > cor(eventSet$triadic , eventSet$triadicSW) [1] 1 > > # Computing Triadic Statistics with the counts of events being returned > eventSet$triadicC <- computeTriads( + observed_time = events$time, + observed_sender = events$sender, + observed_receiver = events$target, + processed_time = eventSet$time, + processed_sender = eventSet$sender, + processed_receiver = eventSet$receiver, + halflife = 2, #halflife parameter + dyadic_weight = 0, + sliding_window = FALSE, + counts = TRUE, + Lerneretal_2013 = FALSE) > > cbind(eventSet$triadic, + eventSet$triadicSW, + eventSet$triadicC) [,1] [,2] [,3] [1,] 0.0000000 0.0000000 0 [2,] 0.0000000 0.0000000 0 [3,] 0.0000000 0.0000000 0 [4,] 0.0000000 0.0000000 0 [5,] 0.5946036 0.5946036 1 [6,] 0.0000000 0.0000000 0 [7,] 0.0000000 0.0000000 0 [8,] 0.0000000 0.0000000 0 [9,] 0.4204482 0.4204482 1 [10,] 0.0000000 0.0000000 0 [11,] 0.0000000 0.0000000 0 [12,] 0.0000000 0.0000000 0 [13,] 0.0000000 0.0000000 0 [14,] 0.0000000 0.0000000 0 [15,] 0.6123724 0.6123724 2 [16,] 0.3150782 0.3150782 2 [17,] 0.3688476 0.3688476 1 [18,] 0.0000000 0.0000000 0 [19,] 0.2227939 0.2227939 2 [20,] 0.2255273 0.2255273 2 [21,] 0.0000000 0.0000000 0 [22,] 0.3570026 0.3570026 2 [23,] 0.1486509 0.1486509 1 [24,] 0.0000000 0.0000000 0 [25,] 0.0922119 0.0922119 1 [26,] 0.0000000 0.0000000 0 [27,] 0.0000000 0.0000000 0 [28,] 0.1612555 0.1612555 2 [29,] 0.0000000 0.0000000 0 [30,] 0.2102241 0.2102241 1 [31,] 0.0000000 0.0000000 0 [32,] 0.1131242 0.1131242 3 [33,] 0.0156250 0.0156250 1 [34,] 0.1145120 0.1145120 3 [35,] 0.1613742 0.1613742 3 > > > > cleanEx() > nameEx("create_riskset") > ### * create_riskset > > flush(stderr()); flush(stdout()) > > ### Name: create_riskset > ### Title: Process and Create Risk Sets for a One- and Two-Mode Relational > ### Event Sequences > ### Aliases: create_riskset > > ### ** Examples > > > data("WikiEvent2018.first100k") > WikiEvent2018.first100k$time <- as.numeric(WikiEvent2018.first100k$time) > ### Creating the EventSet By Employing Case-Control Sampling With M = 10 and > ### Sampling from the Observed Event Sequence with P = 0.01 > EventSet <- create_riskset( + type = "two-mode", + time = WikiEvent2018.first100k$time, # The Time Variable + eventID = WikiEvent2018.first100k$eventID, # The Event Sequence Variable + sender = WikiEvent2018.first100k$user, # The Sender Variable + receiver = WikiEvent2018.first100k$article, # The Receiver Variable + p_samplingobserved = 0.01, # The Probability of Selection + n_controls = 10, # The Number of Controls to Sample from the Full Risk Set + seed = 9999) # The Seed for Replication > > > ### Creating A New EventSet with more observed events and less control events > ### Sampling from the Observed Event Sequence with P = 0.02 > ### Employing Case-Control Sampling With M = 2 > EventSet1 <- create_riskset( + type = "two-mode", + time = WikiEvent2018.first100k$time, # The Time Variable + eventID = WikiEvent2018.first100k$eventID, # The Event Sequence Variable + sender = WikiEvent2018.first100k$user, # The Sender Variable + receiver = WikiEvent2018.first100k$article, # The Receiver Variable + p_samplingobserved = 0.02, # The Probability of Selection + n_controls = 2, # The Number of Controls to Sample from the Full Risk Set + seed = 9999) # The Seed for Replication > > > > > cleanEx() > nameEx("estimateREM") > ### 
* estimateREM > > flush(stderr()); flush(stdout()) > > ### Name: estimateREM > ### Title: Fit a Relational Event Model (REM) to Event Sequence Data > ### Aliases: estimateREM > > ### ** Examples > > #Creating a psuedo one-mode relational event sequence with ordinal timing > relational.seq <- simulateRESeq(n_actors = 8, + n_events = 50, + inertia = TRUE, + inertia_p = 0.10, + sender_outdegree = TRUE, + sender_outdegree_p = 0.05) Warning: `simulateRESeq()` was deprecated in dream 1.0.0. ℹ Please use `simulate_rem_seq()` instead. > > #Creating a post-processing event sequence for the above relational sequence > post.processing <- processOMEventSeq(data = relational.seq, + time = relational.seq$eventID, + eventID = relational.seq$eventID, + sender = relational.seq$sender, + receiver = relational.seq$target, + n_controls = 5) > > #Computing the sender-outdegree statistic for the above post-processing > #one-mode relational event sequence > post.processing$sender.outdegree <- computeSenderOutdegree( + observed_time = relational.seq$eventID, + observed_sender = relational.seq$sender, + processed_time = post.processing$time, + processed_sender = post.processing$sender, + counts = TRUE) > > #Computing the inertia/repetition statistic for the above post-processing > #one-mode relational event sequence > post.processing$inertia <- computeRepetition( + observed_time = relational.seq$eventID, + observed_sender = relational.seq$sender, + observed_receiver = relational.seq$target, + processed_time = post.processing$time, + processed_sender = post.processing$sender, + processed_receiver = post.processing$receiver, + counts = TRUE) > > #Fitting a (ordinal) relational event model to the above one-mode relational > #event sequence > rem <- estimateREM(observed~sender.outdegree+inertia, + event.cluster = post.processing$time, + data=post.processing) Warning: `estimateREM()` was deprecated in dream 1.0.0. ℹ Please use `estimate_rem_logit()` instead. Extracting user-provided data. Prepping data for numerical optimization. Starting optimzation for parameters. Optimzation via Netwon's Method is complete. > summary(rem) #summary of the relational event model Ordinal Timing Relational Event Model Call: estimateREM(formula = observed ~ sender.outdegree + inertia, event.cluster = post.processing$time, data = post.processing) n events: 49 null events: 242 Coefficients: Estimate Std. Error z value Pr(>|z|) sender.outdegree 0.0657 0.0928 0.7081 0.4789 inertia -0.1456 0.2629 -0.5538 0.5797 Null Likelihood: -87.1031 Model Likelihood: -86.8029 Likelihood Ratio Test: 0.6003 with df: 2 p-value: 0.7407 AIC 177.6058 BIC 181.3894 Number of Newton Iterations: 4 > > #Fitting a (ordinal) relational event model to the above one-mode relational > #event sequence via the optim function > rem1 <- estimateREM(observed~sender.outdegree+inertia, + event.cluster = post.processing$time, + data=post.processing, + newton.rhapson=FALSE) #use the optim function Extracting user-provided data. Prepping data for numerical optimization. Starting optimzation for parameters. > summary(rem1) #summary of the relational event model Ordinal Timing Relational Event Model Call: estimateREM(formula = observed ~ sender.outdegree + inertia, event.cluster = post.processing$time, data = post.processing, newton.rhapson = FALSE) n events: 49 null events: 242 Coefficients: Estimate Std. 
Error z value Pr(>|z|) sender.outdegree 0.0657 0.0928 0.7081 0.4789 inertia -0.1456 0.2629 -0.5538 0.5797 Null Likelihood: -87.1031 Model Likelihood: -86.8029 Likelihood Ratio Test: 0.6003 with df: 2 p-value: 0.7407 AIC 177.6058 BIC 181.3894 > > > > cleanEx() > nameEx("estimate_rem_logit") > ### * estimate_rem_logit > > flush(stderr()); flush(stdout()) > > ### Name: estimate_rem_logit > ### Title: Fit a Relational Event Model (REM) to Event Sequence Data > ### Aliases: estimate_rem_logit > > ### ** Examples > > #Creating a psuedo one-mode relational event sequence with ordinal timing > relational.seq <- simulate_rem_seq(n_actors = 8, + n_events = 50, + inertia = TRUE, + inertia_p = 0.10, + sender_outdegree = TRUE, + sender_outdegree_p = 0.05) > > #Creating a post-processing event sequence for the above relational sequence > post.processing <- create_riskset(type = "one-mode", + time = relational.seq$eventID, + eventID = relational.seq$eventID, + sender = as.character(relational.seq$sender), + receiver = as.character(relational.seq$target), + n_controls = 5) > > #Computing the sender-outdegree statistic for the above post-processing > #one-mode relational event sequence > post.processing$sender.outdegree <- remstats_degree(formation = "sender-outdegree", + time = post.processing$time, + observed = post.processing$observed, + sampled = post.processing$sampled, + sender = post.processing$sender, + receiver = post.processing$receiver, + halflife = 2, #halflife parameter + dyadic_weight = 0, + exp_weight_form = FALSE) > > #Computing the inertia/repetition statistic for the above post-processing > #one-mode relational event sequence > post.processing$inertia <- remstats_repetition(time = post.processing$time, + observed = post.processing$observed, + sampled = post.processing$sampled, + sender = post.processing$sender, + receiver = post.processing$receiver, + halflife = 2, #halflife parameter + dyadic_weight = 0, + exp_weight_form = FALSE) > > #Fitting a (ordinal) relational event model to the above one-mode relational > #event sequence > rem <- estimate_rem_logit(observed~ sender.outdegree + inertia, + event.cluster = post.processing$time, + data=post.processing) Extracting user-provided data. Prepping data for numerical optimization. Starting optimzation for parameters. Optimzation via Netwon's Method is complete. > summary(rem) #summary of the relational event model Ordinal Timing Relational Event Model Call: estimate_rem_logit(formula = observed ~ sender.outdegree + inertia, event.cluster = post.processing$time, data = post.processing) n events: 50 null events: 250 Coefficients: Estimate Std. Error z value Pr(>|z|) sender.outdegree 0.3381 0.4659 0.7257 0.4680 inertia 0.1421 1.1824 0.1202 0.9043 Null Likelihood: -89.588 Model Likelihood: -89.2692 Likelihood Ratio Test: 0.6376 with df: 2 p-value: 0.727 AIC 182.5383 BIC 186.3623 Number of Newton Iterations: 5 > > #Fitting a (ordinal) relational event model to the above one-mode relational > #event sequence via the optim function > rem1 <- estimate_rem_logit(observed~ sender.outdegree + inertia, + event.cluster = post.processing$time, + data=post.processing, + newton.rhapson=FALSE) Extracting user-provided data. Prepping data for numerical optimization. Starting optimzation for parameters. 
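Before the summary of rem1 below, note that the fit statistics reported by summary(rem) above can be reproduced directly from the printed log-likelihoods, up to rounding of the displayed digits (the object names here are illustrative only, not package objects):
ll_null <- -89.588; ll_model <- -89.2692   # null and model log-likelihoods reported above
k <- 2; n_events <- 50                     # number of coefficients and of observed events
2 * (ll_model - ll_null)                   # 0.6376, the likelihood ratio test statistic
exp(-2 * (ll_model - ll_null) / 2)         # 0.727, its chi-squared p-value with df = 2
2 * k - 2 * ll_model                       # 182.5384, AIC (printed as 182.5383)
k * log(n_events) - 2 * ll_model           # 186.3624, BIC (printed as 186.3623)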
> summary(rem1) #summary of the relational event model Ordinal Timing Relational Event Model Call: estimate_rem_logit(formula = observed ~ sender.outdegree + inertia, event.cluster = post.processing$time, data = post.processing, newton.rhapson = FALSE) n events: 50 null events: 250 Coefficients: Estimate Std. Error z value Pr(>|z|) sender.outdegree 0.3381 0.4659 0.7257 0.4680 inertia 0.1421 1.1824 0.1202 0.9043 Null Likelihood: -89.588 Model Likelihood: -89.2692 Likelihood Ratio Test: 0.6376 with df: 2 p-value: 0.727 AIC 182.5383 BIC 186.3623 > > > > > > cleanEx() > nameEx("netstats_om_constraint") > ### * netstats_om_constraint > > flush(stderr()); flush(stdout()) > > ### Name: netstats_om_constraint > ### Title: Compute Burt's (1992) Constraint for Ego Networks from a > ### Sociomatrix > ### Aliases: netstats_om_constraint > > ### ** Examples > > > # For this example, we recreate the ego network provided in Burt (1992: 56): > BurtEgoNet <- matrix(c( + 0,1,0,0,1,1,1, + 1,0,0,1,0,0,1, + 0,0,0,0,0,0,1, + 0,1,0,0,0,0,1, + 1,0,0,0,0,0,1, + 1,0,0,0,0,0,1, + 1,1,1,1,1,1,0), + nrow = 7, ncol = 7) > colnames(BurtEgoNet) <- rownames(BurtEgoNet) <- c("A", "B", "C", "D", "E", + "F", "ego") > #the constraint value for the ego replicates that provided in Burt (1992: 56) > netstats_om_constraint(BurtEgoNet) A B C D E F ego 0.6835938 0.8395062 1.0000000 1.1250000 1.1250000 1.1250000 0.4002701 > > > > > > cleanEx() > nameEx("netstats_om_effective") > ### * netstats_om_effective > > flush(stderr()); flush(stdout()) > > ### Name: netstats_om_effective > ### Title: Compute Burt's (1992) Effective Size for Ego Networks from a > ### Sociomatrix > ### Aliases: netstats_om_effective > > ### ** Examples > > # For this example, we recreate the ego network provided in Borgatti (1997): > BorgattiEgoNet <- matrix( + c(0,1,0,0,0,0,0,0,1, + 1,0,0,0,0,0,0,0,1, + 0,0,0,1,0,0,0,0,1, + 0,0,1,0,0,0,0,0,1, + 0,0,0,0,0,1,0,0,1, + 0,0,0,0,1,0,0,0,1, + 0,0,0,0,0,0,0,1,1, + 0,0,0,0,0,0,1,0,1, + 1,1,1,1,1,1,1,1,0), + nrow = 9, ncol = 9, byrow = TRUE) > colnames(BorgattiEgoNet) <- rownames(BorgattiEgoNet) <- c("A", "B", "C", + "D", "E", "F", + "G", "H", "ego") > #the effective size value for the ego replicates that provided in Borgatti (1997) > netstats_om_effective(BorgattiEgoNet) A B C D E F G H ego 1 1 1 1 1 1 1 1 7 > > # For this example, we recreate the ego network provided in Burt (1992: 56): > BurtEgoNet <- matrix(c( + 0,1,0,0,1,1,1, + 1,0,0,1,0,0,1, + 0,0,0,0,0,0,1, + 0,1,0,0,0,0,1, + 1,0,0,0,0,0,1, + 1,0,0,0,0,0,1, + 1,1,1,1,1,1,0), + nrow = 7, ncol = 7) > colnames(BurtEgoNet) <- rownames(BurtEgoNet) <- c("A", "B", "C", "D", "E", + "F", "ego") > #the effective size value for the ego replicates that provided in Burt (1992: 56) > netstats_om_effective(BurtEgoNet) A B C D E F ego 2.500000 1.666667 1.000000 1.000000 1.000000 1.000000 4.666667 > > > > cleanEx() > nameEx("netstats_om_nwalks") > ### * netstats_om_nwalks > > flush(stderr()); flush(stdout()) > > ### Name: netstats_om_nwalks > ### Title: Compute the Number of Walks of Length K in a One-Mode Network > ### Aliases: netstats_om_nwalks > > ### ** Examples > > > # For this example, we generate a random one-mode graph with the sna package. 
> #creating the random network with 10 actors > set.seed(9999) > rnet <- matrix(sample(c(0,1), 10*10, replace = TRUE, prob = c(0.8,0.2)), + nrow = 10, ncol = 10, byrow = TRUE) > diag(rnet) <- 0 #setting self ties to 0 > #counting the walks of length 2 > netstats_om_nwalks(rnet, k = 2) [,1] [,2] [,3] [,4] [,5] [,6] [,7] [,8] [,9] [,10] [1,] 0 2 0 0 1 0 0 2 0 1 [2,] 0 1 1 1 1 1 0 3 2 1 [3,] 1 0 0 0 0 1 1 0 1 0 [4,] 0 1 1 0 0 0 0 1 0 1 [5,] 1 1 0 0 0 1 1 0 1 0 [6,] 1 1 1 0 0 1 1 0 1 0 [7,] 0 0 0 0 1 0 0 2 1 1 [8,] 0 0 0 0 0 0 0 0 0 0 [9,] 0 0 0 0 0 0 0 0 0 0 [10,] 0 0 0 0 0 0 0 0 0 0 > #counting the walks of length 5 > netstats_om_nwalks(rnet, k = 5) [,1] [,2] [,3] [,4] [,5] [,6] [,7] [,8] [,9] [,10] [1,] 3 7 3 1 5 4 3 11 7 5 [2,] 4 8 4 3 5 7 4 13 11 5 [3,] 3 4 3 1 1 4 3 4 5 2 [4,] 1 4 2 1 3 2 1 7 4 3 [5,] 4 7 4 1 3 5 4 8 7 4 [6,] 4 8 5 2 4 6 4 11 9 5 [7,] 1 1 1 1 1 2 1 3 3 1 [8,] 0 0 0 0 0 0 0 0 0 0 [9,] 0 0 0 0 0 0 0 0 0 0 [10,] 0 0 0 0 0 0 0 0 0 0 > > > > cleanEx() > nameEx("netstats_om_pib") > ### * netstats_om_pib > > flush(stderr()); flush(stdout()) > > ### Name: netstats_om_pib > ### Title: Compute Potential for Intercultural Brokerage (PIB) Based on > ### Leal (2025) > ### Aliases: netstats_om_pib > > ### ** Examples > > > # For this example, we recreate Figure 3 in Leal (2025) > LealNet <- matrix( c( + 0,1,0,0,0,0,0, + 1,0,1,1,0,0,0, + 0,1,0,0,1,1,0, + 0,1,0,0,1,0,0, + 0,0,1,1,0,0,0, + 0,0,1,0,0,0,1, + 0,0,0,0,0,1,0), + nrow = 7, ncol = 7, byrow = TRUE) > > colnames(LealNet) <- rownames(LealNet) <- c("A", "B", "C","D", + "E", "F", "G") > categorical_variable <- c(0,0,1,0,0,0,0) > #These values are exactly the same as reported by Leal (2025) > netstats_om_pib(LealNet, + symmetric = TRUE, + g.mem = categorical_variable) ANY A 0.0 B 1.5 C 0.0 D 0.0 E 0.5 F 1.0 G 0.0 > > > > > > > cleanEx() > nameEx("netstats_tm_constraint") > ### * netstats_tm_constraint > > flush(stderr()); flush(stdout()) > > ### Name: netstats_tm_constraint > ### Title: Compute Burchard and Cornwell's (2018) Two-Mode Constraint > ### Aliases: netstats_tm_constraint > > ### ** Examples > > > # For this example, we recreate Figure 2 in Burchard and Cornwell (2018: 13) > BCNet <- matrix( + c(1,1,0,0, + 1,0,1,0, + 1,0,0,1, + 0,1,1,1), + nrow = 4, ncol = 4, byrow = TRUE) > colnames(BCNet) <- c("1", "2", "3", "4") > rownames(BCNet) <- c("i", "j", "k", "m") > #library(sna) #To plot the two mode network, we use the sna R package > #gplot(BCNet, usearrows = FALSE, > # gmode = "twomode", displaylabels = TRUE) > netstats_tm_constraint(BCNet) i j k m 0.7500000 0.7500000 0.7500000 0.3333333 > > #For this example, we recreate Figure 9 in Burchard and Cornwell (2018:18) for > #weighted two mode networks. 
> BCweighted <- matrix(c(1,2,1, 1,0,0, + 0,2,1,0,0,1), + nrow = 4, ncol = 3, + byrow = TRUE) > rownames(BCweighted) <- c("i", "j", "k", "l") > netstats_tm_constraint(BCweighted, weighted = TRUE) i j k l 0.8888889 1.0000000 1.7500000 2.0000000 > > > > > > > > cleanEx() > nameEx("netstats_tm_degreecent") > ### * netstats_tm_degreecent > > flush(stderr()); flush(stdout()) > > ### Name: netstats_tm_degreecent > ### Title: Compute Degree Centrality Values for Two-Mode Networks > ### Aliases: netstats_tm_degreecent > > ### ** Examples > > #Replicating the biparitate graph presented in Knoke and Yang (2020: 109) > knoke_yang_PC <- matrix(c(1,1,0,0, 1,1,0,0, + 1,1,1,0, 0,0,1,1, + 0,0,1,1), byrow = TRUE, + nrow = 5, ncol = 4) > colnames(knoke_yang_PC) <- c("Rubio-R","McConnell-R", "Reid-D", "Sanders-D") > rownames(knoke_yang_PC) <- c("UPS", "MS", "HD", "SEU", "ANA") > netstats_tm_degreecent(knoke_yang_PC, level1 = TRUE) #this value matches the book UPS MS HD SEU ANA 2 2 4 2 2 > netstats_tm_degreecent(knoke_yang_PC, level1 = FALSE) #this value matches the book Rubio-R McConnell-R Reid-D Sanders-D 2 2 3 1 > > > > cleanEx() > nameEx("netstats_tm_density") > ### * netstats_tm_density > > flush(stderr()); flush(stdout()) > > ### Name: netstats_tm_density > ### Title: Compute Level-Specific Graph Density for Two-Mode Networks > ### Aliases: netstats_tm_density > > ### ** Examples > > #Replicating the biparitate graph presented in Knoke and Yang (2020: 109) > knoke_yang_PC <- matrix(c(1,1,0,0, 1,1,0,0, + 1,1,1,0, 0,0,1,1, + 0,0,1,1), byrow = TRUE, + nrow = 5, ncol = 4) > colnames(knoke_yang_PC) <- c("Rubio-R","McConnell-R", "Reid-D", "Sanders-D") > rownames(knoke_yang_PC) <- c("UPS", "MS", "HD", "SEU", "ANA") > #compute two-mode density for level 1 > #note: this value does not match that of Knoke and Yang (which we believe > #is a typo in that book), but does match that of Wasserman and > #Faust (1995: 317) for the ceo dataset. > netstats_tm_density(knoke_yang_PC, level1 = TRUE) [1] 1 > #compute two-mode density for level 2. > #note: this value matches that of the book > netstats_tm_density(knoke_yang_PC, level1 = FALSE) [1] 1.166667 > > > > > cleanEx() > nameEx("netstats_tm_effective") > ### * netstats_tm_effective > > flush(stderr()); flush(stdout()) > > ### Name: netstats_tm_effective > ### Title: Compute Burchard and Cornwell's (2018) Two-Mode Effective Size > ### Aliases: netstats_tm_effective > > ### ** Examples > > > # For this example, we recreate Figure 2 in Burchard and Cornwell (2018: 13) > BCNet <- matrix( + c(1,1,0,0, + 1,0,1,0, + 1,0,0,1, + 0,1,1,1), + nrow = 4, ncol = 4, byrow = TRUE) > colnames(BCNet) <- c("1", "2", "3", "4") > rownames(BCNet) <- c("i", "j", "k", "m") > #library(sna) #To plot the two mode network, we use the sna R package > #gplot(BCNet, usearrows = FALSE, > # gmode = "twomode", displaylabels = TRUE) > netstats_tm_effective(BCNet) i j k m 2.333333 2.333333 2.333333 3.000000 > > #In this example, we recreate Figure 9 in Burchard and Cornwell (2018:18) > #for weighted two mode networks. 
> BCweighted <- matrix(c(1,2,1, 1,0,0, + 0,2,1,0,0,1), + nrow = 4, ncol = 3, + byrow = TRUE) > rownames(BCweighted) <- c("i", "j", "k", "l") > netstats_tm_effective(BCweighted, weighted = TRUE) i j k l 2.444444 1.000000 1.166667 1.000000 > > > > > cleanEx() > nameEx("netstats_tm_egodistance") > ### * netstats_tm_egodistance > > flush(stderr()); flush(stdout()) > > ### Name: netstats_tm_egodistance > ### Title: Compute Fujimoto, Snijders, and Valente's (2018) Ego Homophily > ### Distance for Two-Mode Networks > ### Aliases: netstats_tm_egodistance > > ### ** Examples > > > # For this example, we use the Davis Southern Women's Dataset. > data("southern.women") > #creating a random binary membership vector > set.seed(9999) > membership <- sample(0:1, nrow(southern.women), replace = TRUE) > #the ego 2 mode distance non-standardized > netstats_tm_egodistance(southern.women, mem = membership) [1] 3.846553 3.745543 2.611011 2.247375 1.796825 2.301099 2.345543 1.875125 [9] 2.176712 1.449106 2.260839 1.871329 3.371950 3.470851 2.618803 2.260839 [17] 1.030303 1.030303 > #the ego 2 mode distance standardized > netstats_tm_egodistance(southern.women, mem = membership, standardize = TRUE) EVELYN LAURA THERESA BRENDA CHARLOTTE FRANCES ELEANOR PEARL 0.4808192 0.5350776 0.3263764 0.3210535 0.4492063 0.5752747 0.5863858 0.6250416 RUTH VERNE MYRA KATHERINE SYLVIA NORA HELEN DOROTHY 0.5441780 0.3622766 0.5652098 0.3118881 0.4817072 0.4338564 0.3741148 0.5652098 OLIVIA FLORA 0.5151515 0.5151515 > > > > > cleanEx() > nameEx("netstats_tm_homfourcycles") > ### * netstats_tm_homfourcycles > > flush(stderr()); flush(stdout()) > > ### Name: netstats_tm_homfourcycles > ### Title: Compute Fujimoto, Snijders, and Valente's (2018) Homophilous > ### Four-Cycles for Two-Mode Networks > ### Aliases: netstats_tm_homfourcycles > > ### ** Examples > > > # For this example, we use the Davis Southern Women's Dataset. > data("southern.women") > #creating a random binary membership vector > set.seed(9999) > membership <- sample(0:1, nrow(southern.women), replace = TRUE) > #the homophilous four-cycle values > netstats_tm_homfourcycles(southern.women, mem = membership) EVELYN LAURA THERESA BRENDA CHARLOTTE FRANCES ELEANOR PEARL 34 33 26 23 12 17 18 11 RUTH VERNE MYRA KATHERINE SYLVIA NORA HELEN DOROTHY 17 10 18 14 34 26 15 18 OLIVIA FLORA 1 1 > > > > cleanEx() > nameEx("netstats_tm_redundancy") > ### * netstats_tm_redundancy > > flush(stderr()); flush(stdout()) > > ### Name: netstats_tm_redundancy > ### Title: Compute Burchard and Cornwell's (2018) Two-Mode Redundancy > ### Aliases: netstats_tm_redundancy > > ### ** Examples > > > # For this example, we recreate Figure 2 in Burchard and Cornwell (2018: 13) > BCNet <- matrix( + c(1,1,0,0, + 1,0,1,0, + 1,0,0,1, + 0,1,1,1), + nrow = 4, ncol = 4, byrow = TRUE) > colnames(BCNet) <- c("1", "2", "3", "4") > rownames(BCNet) <- c("i", "j", "k", "m") > #this values replicate those reported by Burchard and Cornwell (2018: 14) > netstats_tm_redundancy(BCNet) i j k m i 0.0000000 0.3333333 0.3333333 0 j 0.3333333 0.0000000 0.3333333 0 k 0.3333333 0.3333333 0.0000000 0 m 0.0000000 0.0000000 0.0000000 0 > > > #For this example, we recreate Figure 9 in Burchard and Cornwell (2018:18) > #for weighted two mode networks. 
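The weighted replication of Figure 9 continues below. Relating the unweighted redundancy matrix above to the effective size values reported earlier: in these figures a node's two-mode effective size equals the number of alters it reaches through shared events minus its summed redundancy. This is a pattern checked against the printed values, not taken from the package internals:
3 - (1/3 + 1/3)   # 2.333333, node i reaches j, k, and m and has two redundancies of 1/3
3 - 0             # 3, node m also reaches three alters but has a redundancy row of zeros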
> BCweighted <- matrix(c(1,2,1, 1,0,0, + 0,2,1,0,0,1), + nrow = 4, ncol = 3, + byrow = TRUE) > rownames(BCweighted) <- c("i", "j", "k", "l") > netstats_tm_redundancy(BCweighted, weighted = TRUE) i j k l i 0.0000000 0 0.2222222 0.3333333 j 0.0000000 0 NA NA k 0.3333333 NA 0.0000000 0.5000000 l 0.5000000 NA 0.5000000 0.0000000 > > > > > > cleanEx() > nameEx("processOMEventSeq") > ### * processOMEventSeq > > flush(stderr()); flush(stdout()) > > ### Name: processOMEventSeq > ### Title: Process and Create Risk Sets for a One-Mode Relational Event > ### Sequence > ### Aliases: processOMEventSeq > > ### ** Examples > > # A random one-mode relational event sequence > set.seed(9999) > events <- data.frame(time = sort(rexp(1:18)), + eventID = 1:18, + sender = c("A", "B", "C", + "A", "D", "E", + "F", "B", "A", + "F", "D", "B", + "G", "B", "D", + "H", "A", "D"), + target = c("B", "C", "D", + "E", "A", "F", + "D", "A", "C", + "G", "B", "C", + "H", "J", "A", + "F", "C", "B")) > > # Creating a one-mode relational risk set with p = 1.00 (all true events) > # and 5 controls > eventSet <- processOMEventSeq(data = events, + time = events$time, + eventID = events$eventID, + sender = events$sender, + receiver = events$target, + p_samplingobserved = 1.00, + n_controls = 5, + seed = 9999) > > # Creating a event-dependent one-mode relational risk set with p = 1.00 (all > # true events) and 3 controls based upon the past 5 events prior to the current event. > events$timeseq <- 1:nrow(events) > eventSetT <- processOMEventSeq(data = events, + time = events$time, + eventID = events$eventID, + sender = events$sender, + receiver = events$target, + p_samplingobserved = 1.00, + time_dependent = TRUE, + timeDV = events$timeseq, + timeDif = 5, + n_controls = 3, + seed = 9999) > > # Creating a time-dependent one-mode relational risk set with p = 1.00 (all > # true events) and 3 controls based upon the past 0.40 time units. 
> eventSetT <- processOMEventSeq(data = events, + time = events$time, + eventID = events$eventID, + sender = events$sender, + receiver = events$target, + p_samplingobserved = 1.00, + time_dependent = TRUE, + timeDV = events$time, #the original time variable + timeDif = 0.40, #time difference of 0.40 units + n_controls = 3, + seed = 9999) > > > > cleanEx() > nameEx("processTMEventSeq") > ### * processTMEventSeq > > flush(stderr()); flush(stdout()) > > ### Name: processTMEventSeq > ### Title: Process and Create Risk Sets for a Two-Mode Relational Event > ### Sequence > ### Aliases: processTMEventSeq > > ### ** Examples > > > data("WikiEvent2018.first100k") > WikiEvent2018.first100k$time <- as.numeric(WikiEvent2018.first100k$time) > ### Creating the EventSet By Employing Case-Control Sampling With M = 10 and > ### Sampling from the Observed Event Sequence with P = 0.01 > EventSet <- processTMEventSeq( + data = WikiEvent2018.first100k, # The Event Dataset + time = WikiEvent2018.first100k$time, # The Time Variable + eventID = WikiEvent2018.first100k$eventID, # The Event Sequence Variable + sender = WikiEvent2018.first100k$user, # The Sender Variable + receiver = WikiEvent2018.first100k$article, # The Receiver Variable + p_samplingobserved = 0.01, # The Probability of Selection + n_controls = 10, # The Number of Controls to Sample from the Full Risk Set + seed = 9999) # The Seed for Replication > > > ### Creating A New EventSet with more observed events and fewer control events > ### Sampling from the Observed Event Sequence with P = 0.02 > ### Employing Case-Control Sampling With M = 2 > EventSet1 <- processTMEventSeq( + data = WikiEvent2018.first100k, # The Event Dataset + time = WikiEvent2018.first100k$time, # The Time Variable + eventID = WikiEvent2018.first100k$eventID, # The Event Sequence Variable + sender = WikiEvent2018.first100k$user, # The Sender Variable + receiver = WikiEvent2018.first100k$article, # The Receiver Variable + p_samplingobserved = 0.02, # The Probability of Selection + n_controls = 2, # The Number of Controls to Sample from the Full Risk Set + seed = 9999) # The Seed for Replication > > ### Creating An Event-Dependent EventSet with P = 0.001 and M = 5, > ### where only actors involved in the past 20 events are included in the > ### creation of the risk set. > event_dependent <- processTMEventSeq( + data = WikiEvent2018.first100k, + time = WikiEvent2018.first100k$time, + sender = WikiEvent2018.first100k$user, + receiver = WikiEvent2018.first100k$article, + eventID = WikiEvent2018.first100k$eventID, + p_samplingobserved = 0.001, + n_controls = 5, + time_dependent = TRUE, + timeDV = 1:nrow(WikiEvent2018.first100k), + timeDif = 20, #20 past events + seed = 9999) > ### Creating A Time-Dependent EventSet with P = 0.001 and M = 5, > ### where only actors involved in the past 30 days are included in the > ### creation of the risk set.
> timeSinceStart <- WikiEvent2018.first100k$time-WikiEvent2018.first100k$time[1] > timeDifMonth <- 30*24*60*60*1000 > timedependent <- processTMEventSeq( + data = WikiEvent2018.first100k, + time = WikiEvent2018.first100k$time, + sender = WikiEvent2018.first100k$user, + receiver = WikiEvent2018.first100k$article, + eventID = WikiEvent2018.first100k$eventID, + p_samplingobserved = 0.001, + n_controls = 5, + time_dependent = TRUE, + timeDV = timeSinceStart, + timeDif = timeDifMonth, + seed = 9999) > > > > cleanEx() > nameEx("remstats_degree") > ### * remstats_degree > > flush(stderr()); flush(stdout()) > > ### Name: remstats_degree > ### Title: Compute Degree Network Statistics for Event Senders and > ### Receivers in a Relational Event Sequence > ### Aliases: remstats_degree > > ### ** Examples > > events <- data.frame(time = 1:18, eventID = 1:18, + sender = c("A", "B", "C", + "A", "D", "E", + "F", "B", "A", + "F", "D", "B", + "G", "B", "D", + "H", "A", "D"), + target = c("B", "C", "D", + "E", "A", "F", + "D", "A", "C", + "G", "B", "C", + "H", "J", "A", + "F", "C", "B")) > > eventSet <- create_riskset(type = "one-mode", + time = events$time, + eventID = events$eventID, + sender = events$sender, + receiver = events$target, + p_samplingobserved = 1.00, + n_controls = 1, + seed = 9999) > > #Computing the sender indegree statistic for the relational event sequence > eventSet$senderind <- remstats_degree( + formation = "sender-indegree", + time = as.numeric(eventSet$time), + observed = eventSet$observed, + sampled = rep(1,nrow(eventSet)), + sender = eventSet$sender, + receiver = eventSet$receiver, + halflife = 2, #halflife parameter + dyadic_weight = 0, + exp_weight_form = FALSE) > > #Computing the sender outdegree statistic for the relational event sequence > eventSet$senderout <- remstats_degree( + formation = "sender-outdegree", + time = as.numeric(eventSet$time), + observed = eventSet$observed, + sampled = rep(1,nrow(eventSet)), + sender = eventSet$sender, + receiver = eventSet$receiver, + halflife = 2, #halflife parameter + dyadic_weight = 0, + exp_weight_form = FALSE) > > > #Computing the receiver outdegree statistic for the relational event sequence > eventSet$recieverout <- remstats_degree( + formation = "receiver-outdegree", + time = as.numeric(eventSet$time), + observed = eventSet$observed, + sampled = rep(1,nrow(eventSet)), + sender = eventSet$sender, + receiver = eventSet$receiver, + halflife = 2, #halflife parameter + dyadic_weight = 0, + exp_weight_form = FALSE) > > > #Computing the receiver indegree statistic for the relational event sequence > eventSet$recieverind <- remstats_degree( + formation = "receiver-indegree", + time = as.numeric(eventSet$time), + observed = eventSet$observed, + sampled = rep(1,nrow(eventSet)), + sender = eventSet$sender, + receiver = eventSet$receiver, + halflife = 2, #halflife parameter + dyadic_weight = 0, + exp_weight_form = FALSE) > > > > cleanEx() > nameEx("remstats_dyadcut") > ### * remstats_dyadcut > > flush(stderr()); flush(stdout()) > > ### Name: remstats_dyadcut > ### Title: A Helper Function to Assist Researchers in Finding Dyadic Weight > ### Cutoff Values > ### Aliases: remstats_dyadcut > > ### ** Examples > > #To replicate the example in the details section: > # with the Lerner et al. 2013 weighting function > remstats_dyadcut(halflife = 15, + relationalWidth = 30, + exp_weight_form = TRUE) You are employing this function to find the corresponding dyadic cutoff value for temporal relevancy. 
The eventTime, relationalWidth, and halflife parameters must all be in the same measurement unit (e.g., hours, days). We hope you are providing the correct values... [1] 0.01155245 > > # without the Lerner et al. 2013 weighting function > remstats_dyadcut(halflife = 15, + relationalWidth = 30, + exp_weight_form = FALSE) You are employing this function to find the corresponding dyadic cutoff value for temporal relevancy. The eventTime, relationalWidth, and halflife parameters must all be in the same measurement unit (e.g., hours, days). We hope you are providing the correct values... [1] 0.25 > > # A result to test the function (should come out to 0.50) > remstats_dyadcut(halflife = 30, + relationalWidth = 30, + exp_weight_form = FALSE) You are employing this function to find the corresponding dyadic cutoff value for temporal relevancy. The eventTime, relationalWidth, and halflife parameters must all be in the same measurement unit (e.g., hours, days). We hope you are providing the correct values... [1] 0.5 > > > # Replicating Lerner and Lomi (2020): > #"We set T1/2 to 30 days so that an event counts as (close to) one in the very next instant of time, > #it counts as 1/2 one month later, it counts as 1/4 two months after the event, and so on. To reduce > #the memory consumption needed to store the network of past events, we set a dyadic weight to > #zero if its value drops below 0.01. If a single event occurred in some dyad this would happen after > #6.64×T1/2, that is after more than half a year." (Lerner and Lomi 2020: 104). > > # Based upon Lerner and Lomi (2020: 104), the result should be around 0.01. Since the > # time values in Lerner and Lomi (2020) are in milliseconds, we have to change > # all measurements into milliseconds > remstats_dyadcut(halflife = (30*24*60*60*1000), #30 days in milliseconds + relationalWidth = (6.64*30*24*60*60*1000), #Based upon the paper + #using the Lerner and Lomi (2020) weighting function + exp_weight_form = FALSE) You are employing this function to find the corresponding dyadic cutoff value for temporal relevancy. The eventTime, relationalWidth, and halflife parameters must all be in the same measurement unit (e.g., hours, days). We hope you are providing the correct values... 
[1] 0.01002676 > > > > > > cleanEx() > nameEx("remstats_fourcycles") > ### * remstats_fourcycles > > flush(stderr()); flush(stdout()) > > ### Name: remstats_fourcycles > ### Title: Compute the Four-Cycles Network Statistic for Event Dyads in a > ### Relational Event Sequence > ### Aliases: remstats_fourcycles > > ### ** Examples > > data("WikiEvent2018.first100k") > WikiEvent2018 <- WikiEvent2018.first100k[1:1000,] #the first one thousand events > WikiEvent2018$time <- as.numeric(WikiEvent2018$time) #making the variable numeric > ### Creating the EventSet By Employing Case-Control Sampling With M = 5 and > ### Sampling from the Observed Event Sequence with P = 0.01 > EventSet <-create_riskset(type = "two-mode", + time = WikiEvent2018$time, # The Time Variable + eventID = WikiEvent2018$eventID, # The Event Sequence Variable + sender = WikiEvent2018$user, # The Sender Variable + receiver = WikiEvent2018$article, # The Receiver Variable + p_samplingobserved = 0.01, # The Probability of Selection + n_controls = 8, # The Number of Controls to Sample from the Full Risk Set + combine = TRUE, + seed = 9999) # The Seed for Replication > > #Computing the four-cycles statistics for the relational event sequence with > #the exponential weights of past events returned > cycle4_weights <- remstats_fourcycles( + time = EventSet$time, + sender = EventSet$sender, + receiver = EventSet$receiver, + sampled = EventSet$sampled, + observed = EventSet$observed, + halflife = 2.592e+09, #halflife parameter + dyadic_weight = 0, + exp_weight_form = FALSE) > > > > #Computing the four-cycles statistics for the relational event sequence with > #the counts of past events returned > cycle4_counts <- remstats_fourcycles( + time = EventSet$time, + sender = EventSet$sender, + receiver = EventSet$receiver, + sampled = EventSet$sampled, + observed = EventSet$observed, + halflife = 2.592e+09, #halflife parameter + dyadic_weight = 0, + counts = TRUE) > > cbind(cycle4_weights, cycle4_counts) cycle4_weights cycle4_counts [1,] 0.000000 0 [2,] 0.000000 0 [3,] 0.000000 0 [4,] 0.000000 0 [5,] 0.000000 0 [6,] 0.000000 0 [7,] 0.000000 0 [8,] 0.000000 0 [9,] 0.000000 0 [10,] 0.000000 0 [11,] 0.000000 0 [12,] 0.000000 0 [13,] 0.000000 0 [14,] 0.000000 0 [15,] 0.000000 0 [16,] 0.000000 0 [17,] 0.000000 0 [18,] 0.000000 0 [19,] 0.000000 0 [20,] 0.000000 0 [21,] 0.000000 0 [22,] 0.000000 0 [23,] 0.000000 0 [24,] 0.000000 0 [25,] 0.000000 0 [26,] 0.000000 0 [27,] 0.000000 0 [28,] 0.000000 0 [29,] 1.730596 1 [30,] 0.000000 0 [31,] 0.000000 0 [32,] 0.000000 0 [33,] 0.000000 0 [34,] 0.000000 0 [35,] 0.000000 0 [36,] 0.000000 0 [37,] 0.000000 0 [38,] 0.000000 0 [39,] 0.000000 0 [40,] 0.000000 0 [41,] 0.000000 0 [42,] 0.000000 0 [43,] 0.000000 0 [44,] 0.000000 0 [45,] 0.000000 0 [46,] 0.000000 0 [47,] 0.000000 0 [48,] 0.000000 0 [49,] 0.000000 0 [50,] 0.000000 0 [51,] 0.000000 0 [52,] 0.000000 0 [53,] 0.000000 0 [54,] 0.000000 0 [55,] 0.000000 0 [56,] 0.000000 0 [57,] 0.000000 0 [58,] 0.000000 0 [59,] 0.000000 0 [60,] 0.000000 0 [61,] 0.000000 0 [62,] 0.000000 0 [63,] 0.000000 0 [64,] 0.000000 0 [65,] 0.000000 0 [66,] 0.000000 0 [67,] 0.000000 0 [68,] 0.000000 0 [69,] 0.000000 0 [70,] 0.000000 0 [71,] 0.000000 0 [72,] 0.000000 0 [73,] 0.000000 0 [74,] 0.000000 0 [75,] 0.000000 0 [76,] 0.000000 0 [77,] 0.000000 0 [78,] 0.000000 0 [79,] 0.000000 0 [80,] 0.000000 0 [81,] 0.000000 0 [82,] 0.000000 0 [83,] 0.000000 0 [84,] 0.000000 0 [85,] 0.000000 0 [86,] 0.000000 0 [87,] 0.000000 0 [88,] 0.000000 0 [89,] 0.000000 0 [90,] 0.000000 0 > > > > > 
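A minimal sketch of the arithmetic behind the remstats_dyadcut() values printed above and the halflife = 2.592e+09 used in the WikiEvent2018 examples, assuming (consistently with the printed values, although the exact formulas are not shown in this log) that the plain form halves an event's weight every halflife time units and that the Lerner et al. (2013) form rescales that decay by log(2)/halflife:

0.5^(30/15)                      # 0.25, the cutoff printed for halflife = 15, relationalWidth = 30
0.5^(30/30)                      # 0.50, the test value printed for halflife = 30, relationalWidth = 30
0.5^(6.64)                       # ~0.01003, the Lerner and Lomi (2020) cutoff; the millisecond unit cancels in the ratio
(log(2)/15) * exp(-30*log(2)/15) # ~0.01155, the cutoff printed with exp_weight_form = TRUE
30*24*60*60*1000                 # 2592000000 (2.592e+09), i.e., 30 days in milliseconds, the halflife used above

The Lerner and Lomi (2020) replication therefore reduces to 0.5^6.64 regardless of the time unit, which is why the printed value lands close to the 0.01 threshold quoted above.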
cleanEx() > nameEx("remstats_persistence") > ### * remstats_persistence > > flush(stderr()); flush(stdout()) > > ### Name: remstats_persistence > ### Title: Compute Butts' (2008) Persistence Network Statistic for Event > ### Dyads in a Relational Event Sequence > ### Aliases: remstats_persistence > > ### ** Examples > > > > # A Dummy One-Mode Event Dataset > events <- data.frame(time = 1:18, + eventID = 1:18, + sender = c("A", "B", "C", + "A", "D", "E", + "F", "B", "A", + "F", "D", "B", + "G", "B", "D", + "H", "A", "D"), + target = c("B", "C", "D", + "E", "A", "F", + "D", "A", "C", + "G", "B", "C", + "H", "J", "A", + "F", "C", "B")) > > # Creating the Post-Processing Event Dataset with Null Events > eventSet <- create_riskset(type = "one-mode", + time = events$time, + eventID = events$eventID, + sender = events$sender, + receiver = events$target, + p_samplingobserved = 1.00, + n_controls = 6, + seed = 9999) > > #Computing the persistence statistic for the relational event sequence > eventSet$remstats_persistence <- remstats_persistence( + time = as.numeric(eventSet$time), + observed = eventSet$observed, + sampled = rep(1,nrow(eventSet)), + sender = eventSet$sender, + target = eventSet$receiver, + ref_sender = TRUE) > > > > > cleanEx() > nameEx("remstats_prefattachment") > ### * remstats_prefattachment > > flush(stderr()); flush(stdout()) > > ### Name: remstats_prefattachment > ### Title: Compute Butts' (2008) Preferential Attachment Network Statistic > ### for Event Dyads in a Relational Event Sequence > ### Aliases: remstats_prefattachment > > ### ** Examples > > > > # A Dummy One-Mode Event Dataset > events <- data.frame(time = 1:18, + eventID = 1:18, + sender = c("A", "B", "C", + "A", "D", "E", + "F", "B", "A", + "F", "D", "B", + "G", "B", "D", + "H", "A", "D"), + target = c("B", "C", "D", + "E", "A", "F", + "D", "A", "C", + "G", "B", "C", + "H", "J", "A", + "F", "C", "B")) > > # Creating the Post-Processing Event Dataset with Null Events > eventSet <- create_riskset( type = "one-mode", + time = events$time, + eventID = events$eventID, + sender = events$sender, + receiver = events$target, + p_samplingobserved = 1.00, + n_controls = 6, + seed = 9999) > > #Computing the preferential attachment statistic for the relational event sequence > eventSet$pref <- remstats_prefattachment( + time = as.numeric(eventSet$time), + observed = eventSet$observed, + sampled = rep(1,nrow(eventSet)), + sender = eventSet$sender, + receiver = eventSet$receiver) ==1096156== Conditional jump or move depends on uninitialised value(s) ==1096156== at 0x1BE0B6F8: computeremprefattach(Rcpp::Vector<14, Rcpp::PreserveStorage>, Rcpp::Vector<14, Rcpp::PreserveStorage>, Rcpp::Vector<14, Rcpp::PreserveStorage>, std::vector, std::allocator >, std::allocator, std::allocator > > >, std::vector, std::allocator >, std::allocator, std::allocator > > >) (packages/tests-vg/dream/src/prefattachincppREM.cpp:43) ==1096156== by 0x1BDDCB25: _dream_computeremprefattach (packages/tests-vg/dream/src/RcppExports.cpp:249) ==1096156== by 0x4A7DCD: R_doDotCall (svn/R-devel/src/main/dotcode.c:766) ==1096156== by 0x4E1E83: bcEval_loop (svn/R-devel/src/main/eval.c:8682) ==1096156== by 0x4F1FD7: bcEval (svn/R-devel/src/main/eval.c:7515) ==1096156== by 0x4F1FD7: bcEval (svn/R-devel/src/main/eval.c:7500) ==1096156== by 0x4F230A: Rf_eval (svn/R-devel/src/main/eval.c:1167) ==1096156== by 0x4F408D: R_execClosure (svn/R-devel/src/main/eval.c:2389) ==1096156== by 0x4F4D46: applyClosure_core (svn/R-devel/src/main/eval.c:2302) ==1096156== by 0x4F2415: 
Rf_applyClosure (svn/R-devel/src/main/eval.c:2324) ==1096156== by 0x4F2415: Rf_eval (svn/R-devel/src/main/eval.c:1280) ==1096156== by 0x4F6059: applydefine (svn/R-devel/src/main/eval.c:3384) ==1096156== by 0x4F2672: Rf_eval (svn/R-devel/src/main/eval.c:1232) ==1096156== by 0x528DCB: Rf_ReplIteration (svn/R-devel/src/main/main.c:264) ==1096156== Uninitialised value was created by a stack allocation ==1096156== at 0x1BE0B16D: computeremprefattach(Rcpp::Vector<14, Rcpp::PreserveStorage>, Rcpp::Vector<14, Rcpp::PreserveStorage>, Rcpp::Vector<14, Rcpp::PreserveStorage>, std::vector, std::allocator >, std::allocator, std::allocator > > >, std::vector, std::allocator >, std::allocator, std::allocator > > >) (packages/tests-vg/dream/src/prefattachincppREM.cpp:12) ==1096156== > > > > > cleanEx() > nameEx("remstats_recency") > ### * remstats_recency > > flush(stderr()); flush(stdout()) > > ### Name: remstats_recency > ### Title: Compute Butts' (2008) Recency Network Statistic for Event Dyads > ### in a Relational Event Sequence > ### Aliases: remstats_recency > > ### ** Examples > > > > # A Dummy One-Mode Event Dataset > events <- data.frame(time = 1:18, + eventID = 1:18, + sender = c("A", "B", "C", + "A", "D", "E", + "F", "B", "A", + "F", "D", "B", + "G", "B", "D", + "H", "A", "D"), + target = c("B", "C", "D", + "E", "A", "F", + "D", "A", "C", + "G", "B", "C", + "H", "J", "A", + "F", "C", "B")) > > # Creating the Post-Processing Event Dataset with Null Events > eventSet <- create_riskset(type = "one-mode", + time = events$time, + eventID = events$eventID, + sender = events$sender, + receiver = events$target, + p_samplingobserved = 1.00, + n_controls = 6, + seed = 9999) > > #Computing the recency statistics (with raw time difference) for the relational event sequence > eventSet$recency_rawdiff <- remstats_recency( + time = as.numeric(eventSet$time), + observed = eventSet$observed, + sampled = rep(1,nrow(eventSet)), + sender = eventSet$sender, + receiver = eventSet$receiver, + type = "raw.diff") > > #Computing the recency statistics (with inverse of time difference) for the > #relational event sequence > eventSet$recency_rawdiff <- remstats_recency( + time = as.numeric(eventSet$time), + observed = eventSet$observed, + sampled = rep(1,nrow(eventSet)), + sender = eventSet$sender, + receiver = eventSet$receiver, + type = "inv.diff.plus1") > > #Computing the rank-based recency statistics for the relational event sequence > eventSet$recency_rawdiff <- remstats_recency( + time = as.numeric(eventSet$time), + observed = eventSet$observed, + sampled = rep(1,nrow(eventSet)), + sender = eventSet$sender, + receiver = eventSet$receiver, + type = "rank.ordered.count") > > > > > cleanEx() > nameEx("remstats_reciprocity") > ### * remstats_reciprocity > > flush(stderr()); flush(stdout()) > > ### Name: remstats_reciprocity > ### Title: Compute the Reciprocity Network Statistic for Event Dyads in a > ### Relational Event Sequence > ### Aliases: remstats_reciprocity > > ### ** Examples > > events <- data.frame(time = 1:18, eventID = 1:18, + sender = c("A", "B", "C", + "A", "D", "E", + "F", "B", "A", + "F", "D", "B", + "G", "B", "D", + "H", "A", "D"), + target = c("B", "C", "D", + "E", "A", "F", + "D", "A", "C", + "G", "B", "C", + "H", "J", "A", + "F", "C", "B")) > > eventSet <-create_riskset(type = "one-mode", + time = events$time, + eventID = events$eventID, + sender = events$sender, + receiver = events$target, + p_samplingobserved = 1.00, + n_controls = 1, + seed = 9999) > > #Computing the reciprocity statistics for 
the relational event sequence > eventSet$recip <- remstats_reciprocity( + time = as.numeric(eventSet$time), + observed = eventSet$observed, + sampled = rep(1,nrow(eventSet)), + sender = eventSet$sender, + receiver = eventSet$receiver, + halflife = 2, #halflife parameter + dyadic_weight = 0, + exp_weight_form = FALSE) > > > > > cleanEx() > nameEx("remstats_repetition") > ### * remstats_repetition > > flush(stderr()); flush(stdout()) > > ### Name: remstats_repetition > ### Title: Compute Butts' (2008) Repetition Network Statistic for Event > ### Dyads in a Relational Event Sequence > ### Aliases: remstats_repetition > > ### ** Examples > > data("WikiEvent2018.first100k") > WikiEvent2018 <- WikiEvent2018.first100k[1:10000,] #the first ten thousand events > WikiEvent2018$time <- as.numeric(WikiEvent2018$time) #making the variable numeric > ### Creating the EventSet By Employing Case-Control Sampling With M = 5 and > ### Sampling from the Observed Event Sequence with P = 0.01 > EventSet <- create_riskset(type = "two-mode", + time = WikiEvent2018$time, # The Time Variable + eventID = WikiEvent2018$eventID, # The Event Sequence Variable + sender = WikiEvent2018$user, # The Sender Variable + receiver = WikiEvent2018$article, # The Receiver Variable + p_samplingobserved = 0.01, # The Probability of Selection + n_controls = 8, # The Number of Controls to Sample from the Full Risk Set + combine = TRUE, + seed = 9999) # The Seed for Replication > > #Computing the repetition statistics for the relational event sequence with the > #weights of past events returned > rep_weights <- remstats_repetition( + time = EventSet$time, + sender = EventSet$sender, + receiver = EventSet$receiver, + sampled = EventSet$sampled, + observed = EventSet$observed, + halflife = 2.592e+09, #halflife parameter + dyadic_weight = 0, + exp_weight_form = FALSE) > > > #Computing the repetition statistics for the relational event sequence with the > #counts of events returned > rep_counts <- remstats_repetition( + time = EventSet$time, + sender = EventSet$sender, + receiver = EventSet$receiver, + sampled = EventSet$sampled, + observed = EventSet$observed, + halflife = 2.592e+09, #halflife parameter + dyadic_weight = 0, + exp_weight_form = FALSE) > > cbind(rep_weights, rep_counts) rep_weights rep_counts [1,] 0.0000000 0.0000000 [2,] 0.0000000 0.0000000 [3,] 0.0000000 0.0000000 [4,] 0.0000000 0.0000000 [5,] 0.0000000 0.0000000 [6,] 0.0000000 0.0000000 [7,] 0.0000000 0.0000000 [8,] 0.0000000 0.0000000 [9,] 0.0000000 0.0000000 [10,] 2.9998096 2.9998096 [11,] 0.0000000 0.0000000 [12,] 0.0000000 0.0000000 [13,] 0.0000000 0.0000000 [14,] 0.0000000 0.0000000 [15,] 0.0000000 0.0000000 [16,] 0.0000000 0.0000000 [17,] 0.0000000 0.0000000 [18,] 0.0000000 0.0000000 [19,] 1.9997473 1.9997473 [20,] 0.0000000 0.0000000 [21,] 0.0000000 0.0000000 [22,] 0.0000000 0.0000000 [23,] 0.0000000 0.0000000 [24,] 0.0000000 0.0000000 [25,] 0.0000000 0.0000000 [26,] 0.0000000 0.0000000 [27,] 0.0000000 0.0000000 [28,] 0.0000000 0.0000000 [29,] 0.0000000 0.0000000 [30,] 0.0000000 0.0000000 [31,] 0.0000000 0.0000000 [32,] 0.0000000 0.0000000 [33,] 0.0000000 0.0000000 [34,] 0.0000000 0.0000000 [35,] 0.0000000 0.0000000 [36,] 0.0000000 0.0000000 [37,] 0.0000000 0.0000000 [38,] 0.0000000 0.0000000 [39,] 0.0000000 0.0000000 [40,] 0.0000000 0.0000000 [41,] 0.0000000 0.0000000 [42,] 0.0000000 0.0000000 [43,] 0.0000000 0.0000000 [44,] 0.0000000 0.0000000 [45,] 0.0000000 0.0000000 [46,] 0.0000000 0.0000000 [47,] 0.0000000 0.0000000 [48,] 0.0000000 0.0000000 [49,] 0.0000000 
0.0000000 [50,] 0.0000000 0.0000000 [51,] 0.0000000 0.0000000 [52,] 0.0000000 0.0000000 [53,] 0.0000000 0.0000000 [54,] 0.0000000 0.0000000 [55,] 0.0000000 0.0000000 [56,] 0.0000000 0.0000000 [57,] 0.0000000 0.0000000 [58,] 0.0000000 0.0000000 [59,] 0.0000000 0.0000000 [60,] 0.0000000 0.0000000 [61,] 0.0000000 0.0000000 [62,] 0.0000000 0.0000000 [63,] 0.0000000 0.0000000 [64,] 0.0000000 0.0000000 [65,] 0.0000000 0.0000000 [66,] 0.0000000 0.0000000 [67,] 0.0000000 0.0000000 [68,] 0.0000000 0.0000000 [69,] 0.0000000 0.0000000 [70,] 0.0000000 0.0000000 [71,] 0.0000000 0.0000000 [72,] 0.0000000 0.0000000 [73,] 0.0000000 0.0000000 [74,] 0.0000000 0.0000000 [75,] 0.0000000 0.0000000 [76,] 0.0000000 0.0000000 [77,] 0.0000000 0.0000000 [78,] 0.0000000 0.0000000 [79,] 0.0000000 0.0000000 [80,] 0.0000000 0.0000000 [81,] 0.0000000 0.0000000 [82,] 0.0000000 0.0000000 [83,] 0.0000000 0.0000000 [84,] 0.0000000 0.0000000 [85,] 0.0000000 0.0000000 [86,] 0.0000000 0.0000000 [87,] 0.0000000 0.0000000 [88,] 0.0000000 0.0000000 [89,] 0.0000000 0.0000000 [90,] 0.0000000 0.0000000 [91,] 0.5284543 0.5284543 [92,] 0.0000000 0.0000000 [93,] 0.0000000 0.0000000 [94,] 0.0000000 0.0000000 [95,] 0.0000000 0.0000000 [96,] 0.0000000 0.0000000 [97,] 0.0000000 0.0000000 [98,] 0.0000000 0.0000000 [99,] 0.0000000 0.0000000 [100,] 0.0000000 0.0000000 [101,] 0.0000000 0.0000000 [102,] 0.0000000 0.0000000 [103,] 0.0000000 0.0000000 [104,] 0.0000000 0.0000000 [105,] 0.0000000 0.0000000 [106,] 0.0000000 0.0000000 [107,] 0.0000000 0.0000000 [108,] 0.0000000 0.0000000 [109,] 0.0000000 0.0000000 [110,] 0.0000000 0.0000000 [111,] 0.0000000 0.0000000 [112,] 0.0000000 0.0000000 [113,] 0.0000000 0.0000000 [114,] 0.0000000 0.0000000 [115,] 0.0000000 0.0000000 [116,] 0.0000000 0.0000000 [117,] 0.0000000 0.0000000 [118,] 0.9802672 0.9802672 [119,] 0.0000000 0.0000000 [120,] 0.0000000 0.0000000 [121,] 0.0000000 0.0000000 [122,] 0.0000000 0.0000000 [123,] 0.0000000 0.0000000 [124,] 0.0000000 0.0000000 [125,] 0.0000000 0.0000000 [126,] 0.0000000 0.0000000 [127,] 0.0000000 0.0000000 [128,] 0.0000000 0.0000000 [129,] 0.0000000 0.0000000 [130,] 0.0000000 0.0000000 [131,] 0.0000000 0.0000000 [132,] 0.0000000 0.0000000 [133,] 0.0000000 0.0000000 [134,] 0.0000000 0.0000000 [135,] 0.0000000 0.0000000 [136,] 2.9785850 2.9785850 [137,] 0.0000000 0.0000000 [138,] 0.0000000 0.0000000 [139,] 0.0000000 0.0000000 [140,] 0.0000000 0.0000000 [141,] 0.0000000 0.0000000 [142,] 0.0000000 0.0000000 [143,] 0.0000000 0.0000000 [144,] 0.0000000 0.0000000 [145,] 0.0000000 0.0000000 [146,] 0.0000000 0.0000000 [147,] 0.0000000 0.0000000 [148,] 0.0000000 0.0000000 [149,] 0.0000000 0.0000000 [150,] 0.0000000 0.0000000 [151,] 0.0000000 0.0000000 [152,] 0.0000000 0.0000000 [153,] 0.0000000 0.0000000 [154,] 0.0000000 0.0000000 [155,] 0.0000000 0.0000000 [156,] 0.0000000 0.0000000 [157,] 0.0000000 0.0000000 [158,] 0.0000000 0.0000000 [159,] 0.0000000 0.0000000 [160,] 0.0000000 0.0000000 [161,] 0.0000000 0.0000000 [162,] 0.0000000 0.0000000 [163,] 0.0000000 0.0000000 [164,] 0.0000000 0.0000000 [165,] 0.0000000 0.0000000 [166,] 0.0000000 0.0000000 [167,] 0.0000000 0.0000000 [168,] 0.0000000 0.0000000 [169,] 0.0000000 0.0000000 [170,] 0.0000000 0.0000000 [171,] 0.0000000 0.0000000 [172,] 0.0000000 0.0000000 [173,] 0.0000000 0.0000000 [174,] 0.0000000 0.0000000 [175,] 0.0000000 0.0000000 [176,] 0.0000000 0.0000000 [177,] 0.0000000 0.0000000 [178,] 0.0000000 0.0000000 [179,] 0.0000000 0.0000000 [180,] 0.0000000 0.0000000 [181,] 0.0000000 0.0000000 [182,] 0.0000000 0.0000000 
[183,] 0.0000000 0.0000000 [184,] 0.0000000 0.0000000 [185,] 0.0000000 0.0000000 [186,] 0.0000000 0.0000000 [187,] 0.0000000 0.0000000 [188,] 0.0000000 0.0000000 [189,] 0.0000000 0.0000000 [190,] 0.0000000 0.0000000 [191,] 0.0000000 0.0000000 [192,] 0.0000000 0.0000000 [193,] 0.0000000 0.0000000 [194,] 0.0000000 0.0000000 [195,] 0.0000000 0.0000000 [196,] 0.0000000 0.0000000 [197,] 0.0000000 0.0000000 [198,] 0.0000000 0.0000000 [199,] 0.9938186 0.9938186 [200,] 0.0000000 0.0000000 [201,] 0.0000000 0.0000000 [202,] 0.0000000 0.0000000 [203,] 0.0000000 0.0000000 [204,] 0.0000000 0.0000000 [205,] 0.0000000 0.0000000 [206,] 0.0000000 0.0000000 [207,] 0.0000000 0.0000000 [208,] 0.0000000 0.0000000 [209,] 0.0000000 0.0000000 [210,] 0.0000000 0.0000000 [211,] 0.0000000 0.0000000 [212,] 0.0000000 0.0000000 [213,] 0.0000000 0.0000000 [214,] 0.0000000 0.0000000 [215,] 0.0000000 0.0000000 [216,] 0.0000000 0.0000000 [217,] 0.0000000 0.0000000 [218,] 0.0000000 0.0000000 [219,] 0.0000000 0.0000000 [220,] 0.0000000 0.0000000 [221,] 0.0000000 0.0000000 [222,] 0.0000000 0.0000000 [223,] 0.0000000 0.0000000 [224,] 0.0000000 0.0000000 [225,] 0.0000000 0.0000000 [226,] 0.0000000 0.0000000 [227,] 0.0000000 0.0000000 [228,] 0.0000000 0.0000000 [229,] 0.0000000 0.0000000 [230,] 0.0000000 0.0000000 [231,] 0.0000000 0.0000000 [232,] 0.0000000 0.0000000 [233,] 0.0000000 0.0000000 [234,] 0.0000000 0.0000000 [235,] 0.0000000 0.0000000 [236,] 0.0000000 0.0000000 [237,] 0.0000000 0.0000000 [238,] 0.0000000 0.0000000 [239,] 0.0000000 0.0000000 [240,] 0.0000000 0.0000000 [241,] 0.0000000 0.0000000 [242,] 0.0000000 0.0000000 [243,] 0.0000000 0.0000000 [244,] 0.0000000 0.0000000 [245,] 0.0000000 0.0000000 [246,] 0.0000000 0.0000000 [247,] 0.0000000 0.0000000 [248,] 0.0000000 0.0000000 [249,] 0.0000000 0.0000000 [250,] 0.0000000 0.0000000 [251,] 0.0000000 0.0000000 [252,] 0.0000000 0.0000000 [253,] 0.0000000 0.0000000 [254,] 0.0000000 0.0000000 [255,] 0.0000000 0.0000000 [256,] 0.0000000 0.0000000 [257,] 0.0000000 0.0000000 [258,] 0.0000000 0.0000000 [259,] 0.0000000 0.0000000 [260,] 0.0000000 0.0000000 [261,] 0.0000000 0.0000000 [262,] 0.9999775 0.9999775 [263,] 0.0000000 0.0000000 [264,] 0.0000000 0.0000000 [265,] 0.0000000 0.0000000 [266,] 0.0000000 0.0000000 [267,] 0.0000000 0.0000000 [268,] 0.0000000 0.0000000 [269,] 0.0000000 0.0000000 [270,] 0.0000000 0.0000000 [271,] 0.0000000 0.0000000 [272,] 0.0000000 0.0000000 [273,] 0.0000000 0.0000000 [274,] 0.0000000 0.0000000 [275,] 0.0000000 0.0000000 [276,] 0.0000000 0.0000000 [277,] 0.0000000 0.0000000 [278,] 0.0000000 0.0000000 [279,] 0.0000000 0.0000000 [280,] 0.0000000 0.0000000 [281,] 0.0000000 0.0000000 [282,] 0.0000000 0.0000000 [283,] 0.0000000 0.0000000 [284,] 0.0000000 0.0000000 [285,] 0.0000000 0.0000000 [286,] 0.0000000 0.0000000 [287,] 0.0000000 0.0000000 [288,] 0.0000000 0.0000000 [289,] 1.9843117 1.9843117 [290,] 0.0000000 0.0000000 [291,] 0.0000000 0.0000000 [292,] 0.0000000 0.0000000 [293,] 0.0000000 0.0000000 [294,] 0.0000000 0.0000000 [295,] 0.0000000 0.0000000 [296,] 0.0000000 0.0000000 [297,] 0.0000000 0.0000000 [298,] 0.0000000 0.0000000 [299,] 0.0000000 0.0000000 [300,] 0.0000000 0.0000000 [301,] 0.0000000 0.0000000 [302,] 0.0000000 0.0000000 [303,] 0.0000000 0.0000000 [304,] 0.0000000 0.0000000 [305,] 0.0000000 0.0000000 [306,] 0.0000000 0.0000000 [307,] 0.0000000 0.0000000 [308,] 0.0000000 0.0000000 [309,] 0.0000000 0.0000000 [310,] 0.0000000 0.0000000 [311,] 0.0000000 0.0000000 [312,] 0.0000000 0.0000000 [313,] 0.0000000 0.0000000 [314,] 0.0000000 
0.0000000 [315,] 0.0000000 0.0000000 [316,] 0.0000000 0.0000000 [317,] 0.0000000 0.0000000 [318,] 0.0000000 0.0000000 [319,] 0.0000000 0.0000000 [320,] 0.0000000 0.0000000 [321,] 0.0000000 0.0000000 [322,] 0.0000000 0.0000000 [323,] 0.0000000 0.0000000 [324,] 0.0000000 0.0000000 [325,] 0.9998856 0.9998856 [326,] 0.0000000 0.0000000 [327,] 0.0000000 0.0000000 [328,] 0.0000000 0.0000000 [329,] 0.0000000 0.0000000 [330,] 0.0000000 0.0000000 [331,] 0.0000000 0.0000000 [332,] 0.0000000 0.0000000 [333,] 0.0000000 0.0000000 [334,] 0.0000000 0.0000000 [335,] 0.0000000 0.0000000 [336,] 0.0000000 0.0000000 [337,] 0.0000000 0.0000000 [338,] 0.0000000 0.0000000 [339,] 0.0000000 0.0000000 [340,] 0.0000000 0.0000000 [341,] 0.0000000 0.0000000 [342,] 0.0000000 0.0000000 [343,] 0.0000000 0.0000000 [344,] 0.0000000 0.0000000 [345,] 0.0000000 0.0000000 [346,] 0.0000000 0.0000000 [347,] 0.0000000 0.0000000 [348,] 0.0000000 0.0000000 [349,] 0.0000000 0.0000000 [350,] 0.0000000 0.0000000 [351,] 0.0000000 0.0000000 [352,] 0.0000000 0.0000000 [353,] 0.0000000 0.0000000 [354,] 0.0000000 0.0000000 [355,] 0.0000000 0.0000000 [356,] 0.0000000 0.0000000 [357,] 0.0000000 0.0000000 [358,] 0.0000000 0.0000000 [359,] 0.0000000 0.0000000 [360,] 0.0000000 0.0000000 [361,] 0.0000000 0.0000000 [362,] 0.0000000 0.0000000 [363,] 0.0000000 0.0000000 [364,] 0.0000000 0.0000000 [365,] 0.0000000 0.0000000 [366,] 0.0000000 0.0000000 [367,] 0.0000000 0.0000000 [368,] 0.0000000 0.0000000 [369,] 0.0000000 0.0000000 [370,] 0.0000000 0.0000000 [371,] 0.0000000 0.0000000 [372,] 0.0000000 0.0000000 [373,] 0.0000000 0.0000000 [374,] 0.0000000 0.0000000 [375,] 0.0000000 0.0000000 [376,] 0.0000000 0.0000000 [377,] 0.0000000 0.0000000 [378,] 0.0000000 0.0000000 [379,] 0.0000000 0.0000000 [380,] 0.0000000 0.0000000 [381,] 0.0000000 0.0000000 [382,] 0.0000000 0.0000000 [383,] 0.0000000 0.0000000 [384,] 0.0000000 0.0000000 [385,] 0.0000000 0.0000000 [386,] 0.0000000 0.0000000 [387,] 0.0000000 0.0000000 [388,] 0.0000000 0.0000000 [389,] 0.0000000 0.0000000 [390,] 0.0000000 0.0000000 [391,] 0.0000000 0.0000000 [392,] 0.0000000 0.0000000 [393,] 0.0000000 0.0000000 [394,] 0.0000000 0.0000000 [395,] 0.0000000 0.0000000 [396,] 0.0000000 0.0000000 [397,] 0.0000000 0.0000000 [398,] 0.0000000 0.0000000 [399,] 0.0000000 0.0000000 [400,] 0.0000000 0.0000000 [401,] 0.0000000 0.0000000 [402,] 0.0000000 0.0000000 [403,] 0.0000000 0.0000000 [404,] 0.0000000 0.0000000 [405,] 0.0000000 0.0000000 [406,] 0.0000000 0.0000000 [407,] 0.0000000 0.0000000 [408,] 0.0000000 0.0000000 [409,] 0.0000000 0.0000000 [410,] 0.0000000 0.0000000 [411,] 0.0000000 0.0000000 [412,] 0.0000000 0.0000000 [413,] 0.0000000 0.0000000 [414,] 0.0000000 0.0000000 [415,] 0.0000000 0.0000000 [416,] 0.0000000 0.0000000 [417,] 0.0000000 0.0000000 [418,] 0.0000000 0.0000000 [419,] 0.0000000 0.0000000 [420,] 0.0000000 0.0000000 [421,] 0.0000000 0.0000000 [422,] 0.0000000 0.0000000 [423,] 0.0000000 0.0000000 [424,] 0.0000000 0.0000000 [425,] 0.0000000 0.0000000 [426,] 0.0000000 0.0000000 [427,] 0.0000000 0.0000000 [428,] 0.0000000 0.0000000 [429,] 0.0000000 0.0000000 [430,] 0.0000000 0.0000000 [431,] 0.0000000 0.0000000 [432,] 0.0000000 0.0000000 [433,] 0.0000000 0.0000000 [434,] 0.0000000 0.0000000 [435,] 0.0000000 0.0000000 [436,] 0.0000000 0.0000000 [437,] 0.0000000 0.0000000 [438,] 0.0000000 0.0000000 [439,] 0.0000000 0.0000000 [440,] 0.0000000 0.0000000 [441,] 0.0000000 0.0000000 [442,] 0.9775604 0.9775604 [443,] 0.0000000 0.0000000 [444,] 0.0000000 0.0000000 [445,] 0.0000000 0.0000000 [446,] 
0.0000000 0.0000000 [447,] 0.0000000 0.0000000 [448,] 0.0000000 0.0000000 [449,] 0.0000000 0.0000000 [450,] 0.0000000 0.0000000 [451,] 0.0000000 0.0000000 [452,] 0.0000000 0.0000000 [453,] 0.0000000 0.0000000 [454,] 0.0000000 0.0000000 [455,] 0.0000000 0.0000000 [456,] 0.0000000 0.0000000 [457,] 0.0000000 0.0000000 [458,] 0.0000000 0.0000000 [459,] 0.0000000 0.0000000 [460,] 0.0000000 0.0000000 [461,] 0.0000000 0.0000000 [462,] 0.0000000 0.0000000 [463,] 0.0000000 0.0000000 [464,] 0.0000000 0.0000000 [465,] 0.0000000 0.0000000 [466,] 0.0000000 0.0000000 [467,] 0.0000000 0.0000000 [468,] 0.0000000 0.0000000 [469,] 0.0000000 0.0000000 [470,] 0.0000000 0.0000000 [471,] 0.0000000 0.0000000 [472,] 0.0000000 0.0000000 [473,] 0.0000000 0.0000000 [474,] 0.0000000 0.0000000 [475,] 0.0000000 0.0000000 [476,] 0.0000000 0.0000000 [477,] 0.0000000 0.0000000 [478,] 0.0000000 0.0000000 [479,] 0.0000000 0.0000000 [480,] 0.0000000 0.0000000 [481,] 0.0000000 0.0000000 [482,] 0.0000000 0.0000000 [483,] 0.0000000 0.0000000 [484,] 0.0000000 0.0000000 [485,] 0.0000000 0.0000000 [486,] 0.0000000 0.0000000 [487,] 0.0000000 0.0000000 [488,] 0.0000000 0.0000000 [489,] 0.0000000 0.0000000 [490,] 0.0000000 0.0000000 [491,] 0.0000000 0.0000000 [492,] 0.0000000 0.0000000 [493,] 0.0000000 0.0000000 [494,] 0.0000000 0.0000000 [495,] 0.0000000 0.0000000 [496,] 0.9999906 0.9999906 [497,] 0.0000000 0.0000000 [498,] 0.0000000 0.0000000 [499,] 0.0000000 0.0000000 [500,] 0.0000000 0.0000000 [501,] 0.0000000 0.0000000 [502,] 0.0000000 0.0000000 [503,] 0.0000000 0.0000000 [504,] 0.0000000 0.0000000 [505,] 0.0000000 0.0000000 [506,] 0.0000000 0.0000000 [507,] 0.0000000 0.0000000 [508,] 0.0000000 0.0000000 [509,] 0.0000000 0.0000000 [510,] 0.0000000 0.0000000 [511,] 0.0000000 0.0000000 [512,] 0.0000000 0.0000000 [513,] 0.0000000 0.0000000 [514,] 0.0000000 0.0000000 [515,] 0.0000000 0.0000000 [516,] 0.0000000 0.0000000 [517,] 0.0000000 0.0000000 [518,] 0.0000000 0.0000000 [519,] 0.0000000 0.0000000 [520,] 0.0000000 0.0000000 [521,] 0.0000000 0.0000000 [522,] 0.0000000 0.0000000 [523,] 0.9999896 0.9999896 [524,] 0.0000000 0.0000000 [525,] 0.0000000 0.0000000 [526,] 0.0000000 0.0000000 [527,] 0.0000000 0.0000000 [528,] 0.0000000 0.0000000 [529,] 0.0000000 0.0000000 [530,] 0.0000000 0.0000000 [531,] 0.0000000 0.0000000 [532,] 0.0000000 0.0000000 [533,] 0.0000000 0.0000000 [534,] 0.0000000 0.0000000 [535,] 0.0000000 0.0000000 [536,] 0.0000000 0.0000000 [537,] 0.0000000 0.0000000 [538,] 0.0000000 0.0000000 [539,] 0.0000000 0.0000000 [540,] 0.0000000 0.0000000 [541,] 0.0000000 0.0000000 [542,] 0.0000000 0.0000000 [543,] 0.0000000 0.0000000 [544,] 0.0000000 0.0000000 [545,] 0.0000000 0.0000000 [546,] 0.0000000 0.0000000 [547,] 0.0000000 0.0000000 [548,] 0.0000000 0.0000000 [549,] 0.0000000 0.0000000 [550,] 0.0000000 0.0000000 [551,] 0.0000000 0.0000000 [552,] 0.0000000 0.0000000 [553,] 0.0000000 0.0000000 [554,] 0.0000000 0.0000000 [555,] 0.0000000 0.0000000 [556,] 0.0000000 0.0000000 [557,] 0.0000000 0.0000000 [558,] 0.0000000 0.0000000 [559,] 0.0000000 0.0000000 [560,] 0.0000000 0.0000000 [561,] 0.0000000 0.0000000 [562,] 0.0000000 0.0000000 [563,] 0.0000000 0.0000000 [564,] 0.0000000 0.0000000 [565,] 0.0000000 0.0000000 [566,] 0.0000000 0.0000000 [567,] 0.0000000 0.0000000 [568,] 0.0000000 0.0000000 [569,] 0.0000000 0.0000000 [570,] 0.0000000 0.0000000 [571,] 0.0000000 0.0000000 [572,] 0.0000000 0.0000000 [573,] 0.0000000 0.0000000 [574,] 0.0000000 0.0000000 [575,] 0.0000000 0.0000000 [576,] 0.0000000 0.0000000 [577,] 0.0000000 
0.0000000 [578,] 0.0000000 0.0000000 [579,] 0.0000000 0.0000000 [580,] 0.0000000 0.0000000 [581,] 0.0000000 0.0000000 [582,] 0.0000000 0.0000000 [583,] 0.0000000 0.0000000 [584,] 0.0000000 0.0000000 [585,] 0.0000000 0.0000000 [586,] 0.0000000 0.0000000 [587,] 0.0000000 0.0000000 [588,] 0.0000000 0.0000000 [589,] 0.0000000 0.0000000 [590,] 0.0000000 0.0000000 [591,] 0.0000000 0.0000000 [592,] 0.0000000 0.0000000 [593,] 0.0000000 0.0000000 [594,] 0.0000000 0.0000000 [595,] 0.0000000 0.0000000 [596,] 0.0000000 0.0000000 [597,] 0.0000000 0.0000000 [598,] 0.0000000 0.0000000 [599,] 0.0000000 0.0000000 [600,] 0.0000000 0.0000000 [601,] 0.0000000 0.0000000 [602,] 0.0000000 0.0000000 [603,] 0.0000000 0.0000000 [604,] 0.0000000 0.0000000 [605,] 0.0000000 0.0000000 [606,] 0.0000000 0.0000000 [607,] 0.0000000 0.0000000 [608,] 0.0000000 0.0000000 [609,] 0.0000000 0.0000000 [610,] 0.0000000 0.0000000 [611,] 0.0000000 0.0000000 [612,] 0.0000000 0.0000000 [613,] 0.0000000 0.0000000 [614,] 0.0000000 0.0000000 [615,] 0.0000000 0.0000000 [616,] 0.0000000 0.0000000 [617,] 0.0000000 0.0000000 [618,] 0.0000000 0.0000000 [619,] 0.0000000 0.0000000 [620,] 0.0000000 0.0000000 [621,] 0.0000000 0.0000000 [622,] 0.0000000 0.0000000 [623,] 0.0000000 0.0000000 [624,] 0.0000000 0.0000000 [625,] 0.0000000 0.0000000 [626,] 0.0000000 0.0000000 [627,] 0.0000000 0.0000000 [628,] 0.0000000 0.0000000 [629,] 0.0000000 0.0000000 [630,] 0.0000000 0.0000000 [631,] 0.0000000 0.0000000 [632,] 0.0000000 0.0000000 [633,] 0.0000000 0.0000000 [634,] 0.0000000 0.0000000 [635,] 0.0000000 0.0000000 [636,] 0.0000000 0.0000000 [637,] 0.0000000 0.0000000 [638,] 0.0000000 0.0000000 [639,] 0.0000000 0.0000000 [640,] 0.0000000 0.0000000 [641,] 0.0000000 0.0000000 [642,] 0.0000000 0.0000000 [643,] 0.0000000 0.0000000 [644,] 0.0000000 0.0000000 [645,] 0.0000000 0.0000000 [646,] 0.0000000 0.0000000 [647,] 0.0000000 0.0000000 [648,] 0.0000000 0.0000000 [649,] 0.0000000 0.0000000 [650,] 0.0000000 0.0000000 [651,] 0.0000000 0.0000000 [652,] 0.0000000 0.0000000 [653,] 0.0000000 0.0000000 [654,] 0.0000000 0.0000000 [655,] 0.0000000 0.0000000 [656,] 0.0000000 0.0000000 [657,] 0.0000000 0.0000000 [658,] 0.0000000 0.0000000 [659,] 0.0000000 0.0000000 [660,] 0.0000000 0.0000000 [661,] 0.0000000 0.0000000 [662,] 0.0000000 0.0000000 [663,] 0.0000000 0.0000000 [664,] 0.0000000 0.0000000 [665,] 0.0000000 0.0000000 [666,] 0.0000000 0.0000000 [667,] 0.0000000 0.0000000 [668,] 0.0000000 0.0000000 [669,] 0.0000000 0.0000000 [670,] 0.0000000 0.0000000 [671,] 0.0000000 0.0000000 [672,] 0.0000000 0.0000000 [673,] 0.0000000 0.0000000 [674,] 0.0000000 0.0000000 [675,] 0.0000000 0.0000000 [676,] 0.0000000 0.0000000 [677,] 0.0000000 0.0000000 [678,] 0.0000000 0.0000000 [679,] 0.0000000 0.0000000 [680,] 0.0000000 0.0000000 [681,] 0.0000000 0.0000000 [682,] 0.0000000 0.0000000 [683,] 0.0000000 0.0000000 [684,] 0.0000000 0.0000000 [685,] 0.0000000 0.0000000 [686,] 0.0000000 0.0000000 [687,] 0.0000000 0.0000000 [688,] 0.0000000 0.0000000 [689,] 0.0000000 0.0000000 [690,] 0.0000000 0.0000000 [691,] 0.0000000 0.0000000 [692,] 0.0000000 0.0000000 [693,] 0.0000000 0.0000000 [694,] 0.0000000 0.0000000 [695,] 0.0000000 0.0000000 [696,] 0.0000000 0.0000000 [697,] 0.0000000 0.0000000 [698,] 0.0000000 0.0000000 [699,] 0.0000000 0.0000000 [700,] 0.0000000 0.0000000 [701,] 0.0000000 0.0000000 [702,] 0.0000000 0.0000000 [703,] 0.0000000 0.0000000 [704,] 0.0000000 0.0000000 [705,] 0.0000000 0.0000000 [706,] 0.0000000 0.0000000 [707,] 0.0000000 0.0000000 [708,] 0.0000000 0.0000000 [709,] 
0.0000000 0.0000000 [710,] 0.0000000 0.0000000 [711,] 0.0000000 0.0000000 [712,] 0.0000000 0.0000000 [713,] 0.0000000 0.0000000 [714,] 0.0000000 0.0000000 [715,] 0.0000000 0.0000000 [716,] 0.0000000 0.0000000 [717,] 0.0000000 0.0000000 [718,] 0.0000000 0.0000000 [719,] 0.0000000 0.0000000 [720,] 0.0000000 0.0000000 [721,] 0.9997334 0.9997334 [722,] 0.0000000 0.0000000 [723,] 0.0000000 0.0000000 [724,] 0.0000000 0.0000000 [725,] 0.0000000 0.0000000 [726,] 0.0000000 0.0000000 [727,] 0.0000000 0.0000000 [728,] 0.0000000 0.0000000 [729,] 0.0000000 0.0000000 [730,] 3.9969473 3.9969473 [731,] 0.0000000 0.0000000 [732,] 0.0000000 0.0000000 [733,] 0.0000000 0.0000000 [734,] 0.0000000 0.0000000 [735,] 0.0000000 0.0000000 [736,] 0.0000000 0.0000000 [737,] 0.0000000 0.0000000 [738,] 0.0000000 0.0000000 [739,] 0.0000000 0.0000000 [740,] 0.0000000 0.0000000 [741,] 0.0000000 0.0000000 [742,] 0.0000000 0.0000000 [743,] 0.0000000 0.0000000 [744,] 0.0000000 0.0000000 [745,] 0.0000000 0.0000000 [746,] 0.0000000 0.0000000 [747,] 0.0000000 0.0000000 [748,] 0.0000000 0.0000000 [749,] 0.0000000 0.0000000 [750,] 0.0000000 0.0000000 [751,] 0.0000000 0.0000000 [752,] 0.0000000 0.0000000 [753,] 0.0000000 0.0000000 [754,] 0.0000000 0.0000000 [755,] 0.0000000 0.0000000 [756,] 0.0000000 0.0000000 [757,] 0.0000000 0.0000000 [758,] 0.0000000 0.0000000 [759,] 0.0000000 0.0000000 [760,] 0.0000000 0.0000000 [761,] 0.0000000 0.0000000 [762,] 0.0000000 0.0000000 [763,] 0.0000000 0.0000000 [764,] 0.0000000 0.0000000 [765,] 0.0000000 0.0000000 [766,] 0.0000000 0.0000000 [767,] 0.0000000 0.0000000 [768,] 0.0000000 0.0000000 [769,] 0.0000000 0.0000000 [770,] 0.0000000 0.0000000 [771,] 0.0000000 0.0000000 [772,] 0.0000000 0.0000000 [773,] 0.0000000 0.0000000 [774,] 0.0000000 0.0000000 [775,] 0.0000000 0.0000000 [776,] 0.0000000 0.0000000 [777,] 0.0000000 0.0000000 [778,] 0.0000000 0.0000000 [779,] 0.0000000 0.0000000 [780,] 0.0000000 0.0000000 [781,] 0.0000000 0.0000000 [782,] 0.0000000 0.0000000 [783,] 0.0000000 0.0000000 [784,] 0.9994070 0.9994070 [785,] 0.0000000 0.0000000 [786,] 0.0000000 0.0000000 [787,] 0.0000000 0.0000000 [788,] 0.0000000 0.0000000 [789,] 0.0000000 0.0000000 [790,] 0.0000000 0.0000000 [791,] 0.0000000 0.0000000 [792,] 0.0000000 0.0000000 [793,] 0.9999307 0.9999307 [794,] 0.0000000 0.0000000 [795,] 0.0000000 0.0000000 [796,] 0.0000000 0.0000000 [797,] 0.0000000 0.0000000 [798,] 0.0000000 0.0000000 [799,] 0.0000000 0.0000000 [800,] 0.0000000 0.0000000 [801,] 0.0000000 0.0000000 [802,] 0.0000000 0.0000000 [803,] 0.0000000 0.0000000 [804,] 0.0000000 0.0000000 [805,] 0.0000000 0.0000000 [806,] 0.0000000 0.0000000 [807,] 0.0000000 0.0000000 [808,] 0.0000000 0.0000000 [809,] 0.0000000 0.0000000 [810,] 0.0000000 0.0000000 [811,] 0.0000000 0.0000000 [812,] 0.0000000 0.0000000 [813,] 0.0000000 0.0000000 [814,] 0.0000000 0.0000000 [815,] 0.0000000 0.0000000 [816,] 0.0000000 0.0000000 [817,] 0.0000000 0.0000000 [818,] 0.0000000 0.0000000 [819,] 0.0000000 0.0000000 [820,] 0.0000000 0.0000000 [821,] 0.0000000 0.0000000 [822,] 0.0000000 0.0000000 [823,] 0.0000000 0.0000000 [824,] 0.0000000 0.0000000 [825,] 0.0000000 0.0000000 [826,] 0.0000000 0.0000000 [827,] 0.0000000 0.0000000 [828,] 0.0000000 0.0000000 [829,] 0.0000000 0.0000000 [830,] 0.0000000 0.0000000 [831,] 0.0000000 0.0000000 [832,] 0.0000000 0.0000000 [833,] 0.0000000 0.0000000 [834,] 0.0000000 0.0000000 [835,] 0.0000000 0.0000000 [836,] 0.0000000 0.0000000 [837,] 0.0000000 0.0000000 [838,] 0.0000000 0.0000000 [839,] 0.0000000 0.0000000 [840,] 0.0000000 
0.0000000 [841,] 0.0000000 0.0000000 [842,] 0.0000000 0.0000000 [843,] 0.0000000 0.0000000 [844,] 0.0000000 0.0000000 [845,] 0.0000000 0.0000000 [846,] 0.0000000 0.0000000 [847,] 0.0000000 0.0000000 [848,] 0.0000000 0.0000000 [849,] 0.0000000 0.0000000 [850,] 0.0000000 0.0000000 [851,] 0.0000000 0.0000000 [852,] 0.0000000 0.0000000 [853,] 0.0000000 0.0000000 [854,] 0.0000000 0.0000000 [855,] 0.0000000 0.0000000 [856,] 2.9214995 2.9214995 [857,] 0.0000000 0.0000000 [858,] 0.0000000 0.0000000 [859,] 0.0000000 0.0000000 [860,] 0.0000000 0.0000000 [861,] 0.0000000 0.0000000 [862,] 0.0000000 0.0000000 [863,] 0.0000000 0.0000000 [864,] 0.0000000 0.0000000 [865,] 0.0000000 0.0000000 [866,] 0.0000000 0.0000000 [867,] 0.0000000 0.0000000 [868,] 0.0000000 0.0000000 [869,] 0.0000000 0.0000000 [870,] 0.0000000 0.0000000 [871,] 0.0000000 0.0000000 [872,] 0.0000000 0.0000000 [873,] 0.0000000 0.0000000 [874,] 0.0000000 0.0000000 [875,] 0.0000000 0.0000000 [876,] 0.0000000 0.0000000 [877,] 0.0000000 0.0000000 [878,] 0.0000000 0.0000000 [879,] 0.0000000 0.0000000 [880,] 0.0000000 0.0000000 [881,] 0.0000000 0.0000000 [882,] 0.0000000 0.0000000 [883,] 0.0000000 0.0000000 [884,] 0.0000000 0.0000000 [885,] 0.0000000 0.0000000 [886,] 0.0000000 0.0000000 [887,] 0.0000000 0.0000000 [888,] 0.0000000 0.0000000 [889,] 0.0000000 0.0000000 [890,] 0.0000000 0.0000000 [891,] 0.0000000 0.0000000 [892,] 0.0000000 0.0000000 [893,] 0.0000000 0.0000000 [894,] 0.0000000 0.0000000 [895,] 0.0000000 0.0000000 [896,] 0.0000000 0.0000000 [897,] 0.0000000 0.0000000 [898,] 0.0000000 0.0000000 [899,] 0.0000000 0.0000000 [900,] 0.0000000 0.0000000 > > > > > > cleanEx() > nameEx("remstats_triads") > ### * remstats_triads > > flush(stderr()); flush(stdout()) > > ### Name: remstats_triads > ### Title: Compute Butts' (2008) Triadic Formation Statistics for > ### Relational Event Sequences > ### Aliases: remstats_triads > > ### ** Examples > > events <- data.frame(time = 1:18, + eventID = 1:18, + sender = c("A", "B", "C", + "A", "D", "E", + "F", "B", "A", + "F", "D", "B", + "G", "B", "D", + "H", "A", "D"), + target = c("B", "C", "D", + "E", "A", "F", + "D", "A", "C", + "G", "B", "C", + "H", "J", "A", + "F", "C", "B")) > > eventSet <-create_riskset(type = "one-mode", + time = events$time, + eventID = events$eventID, + sender = events$sender, + receiver = events$target, + p_samplingobserved = 1.00, + n_controls = 1, + seed = 9999) > > #compute the triadic statistic for the outgoing shared partners formation > eventSet$OSP <- remstats_triads( + formation = "OSP", #outgoing shared partners argument + time = as.numeric(eventSet$time), + observed = eventSet$observed, + sampled = rep(1,nrow(eventSet)), + sender = eventSet$sender, + receiver = eventSet$receiver, + halflife = 2, #halflife parameter + dyadic_weight = 0, + exp_weight_form = FALSE) > > #compute the triadic statistic for the incoming shared partners formation > eventSet$ISP <- remstats_triads( + formation = "ISP", #incoming shared partners argument + time = as.numeric(eventSet$time), + observed = eventSet$observed, + sampled = rep(1,nrow(eventSet)), + sender = eventSet$sender, + receiver = eventSet$receiver, + halflife = 2, #halflife parameter + dyadic_weight = 0, + exp_weight_form = FALSE) > > #compute the triadic statistic for the outgoing two-paths formation > eventSet$OTP <- remstats_triads( + formation = "OTP", #outgoing two-paths argument + time = as.numeric(eventSet$time), + observed = eventSet$observed, + sampled = rep(1,nrow(eventSet)), + sender = eventSet$sender, + receiver = 
eventSet$receiver, + halflife = 2, #halflife parameter + dyadic_weight = 0, + exp_weight_form = FALSE) > > #compute the triadic statistic for the incoming two-paths formation > eventSet$ITP <- remstats_triads( + formation = "ITP", #incoming two-paths argument + time = as.numeric(eventSet$time), + observed = eventSet$observed, + sampled = rep(1,nrow(eventSet)), + sender = eventSet$sender, + receiver = eventSet$receiver, + halflife = 2, #halflife parameter + dyadic_weight = 0, + exp_weight_form = FALSE) > > > > cleanEx() > nameEx("simulateRESeq") > ### * simulateRESeq > > flush(stderr()); flush(stdout()) > > ### Name: simulateRESeq > ### Title: Simulate a Random One-Mode Relational Event Sequence > ### Aliases: simulateRESeq > > ### ** Examples > > #Creating a random relational sequence with 5 actors and 25 events > rem1<- simulateRESeq(n_actors = 25, + n_events = 1000, + inertia = TRUE, + inertia_p = 0.12, + recip = TRUE, + recip_p = 0.08, + sender_outdegree = TRUE, + sender_outdegree_p = 0.09, + target_indegree = TRUE, + target_indegree_p = 0.05, + assort = TRUE, + assort_p = -0.01, + trans_trips = TRUE, + trans_trips_p = 0.09, + three_cycles = TRUE, + three_cycles_p = 0.04, + starting_events = NULL, + returnStats = TRUE) > rem1 eventID sender target inertia reciprocity sender_outdegree 1: 1 6 10 0 0 0 2: 2 5 25 0 0 0 3: 3 11 21 0 0 0 4: 4 22 14 0 0 0 5: 5 17 9 0 0 0 --- 996: 996 3 18 1 0 37 997: 997 2 18 2 2 44 998: 998 8 24 2 1 45 999: 999 19 2 1 2 37 1000: 1000 17 25 2 6 36 target_indegree assort trans_trip three_cycles 1: 0 0 0 0 2: 0 0 0 0 3: 0 0 0 0 4: 0 0 0 0 5: 0 0 0 0 --- 996: 33 1221 59 52 997: 34 1496 71 59 998: 36 1620 65 86 999: 42 1554 56 90 1000: 40 1440 66 51 > > #Creating a random relational sequence with 100 actors and 1000 events with > #only inertia and reciprocity > rem2 <- simulateRESeq(n_actors = 100, + n_events = 1000, + inertia = TRUE, + inertia_p = 0.12, + recip = TRUE, + recip_p = 0.08, + returnStats = TRUE) > rem2 eventID sender target inertia reciprocity 1: 1 79 35 0 0 2: 2 4 92 0 0 3: 3 51 46 0 0 4: 4 58 86 0 0 5: 5 84 94 0 0 --- 996: 996 75 83 0 0 997: 997 87 92 0 0 998: 998 97 66 0 0 999: 999 87 42 0 0 1000: 1000 29 80 0 0 > > #Creating a random relational sequence based on the starting sequence with > #only inertia and reciprocity > rem3 <- simulateRESeq(n_actors = 100, #does not matter can be any value, this is + #overridden by the starting event sequence + n_events = 100, + inertia = TRUE, + inertia_p = 0.12, + recip = TRUE, + recip_p = 0.08, + #a random starting event sequence + starting_events = matrix(c(1:10, 10:1), + nrow = 10, ncol = 2, byrow = FALSE), + returnStats = TRUE) > rem3 eventID sender target inertia reciprocity 1: 1 1 10 0 0 2: 2 2 9 0 0 3: 3 3 8 0 0 4: 4 4 7 0 0 5: 5 5 6 0 0 6: 6 6 5 0 1 7: 7 7 4 0 1 8: 8 8 3 0 1 9: 9 9 2 0 1 10: 10 10 1 0 1 11: 11 1 7 0 0 12: 12 5 6 1 1 13: 13 6 9 0 0 14: 14 10 1 1 1 15: 15 3 1 0 0 16: 16 5 7 0 0 17: 17 1 10 1 2 18: 18 9 4 0 0 19: 19 4 8 0 0 20: 20 6 10 0 0 21: 21 10 1 2 2 22: 22 3 9 0 0 23: 23 3 8 1 1 24: 24 1 10 2 3 25: 25 8 5 0 0 26: 26 10 6 0 1 27: 27 7 1 0 1 28: 28 2 10 0 0 29: 29 4 2 0 0 30: 30 10 1 3 3 31: 31 3 8 2 1 32: 32 3 2 0 0 33: 33 5 6 2 1 34: 34 5 6 3 1 35: 35 7 2 0 0 36: 36 8 4 0 1 37: 37 1 3 0 1 38: 38 3 8 3 1 39: 39 6 9 1 0 40: 40 9 5 0 0 41: 41 6 5 1 4 42: 42 9 6 0 2 43: 43 9 6 1 2 44: 44 5 10 0 0 45: 45 1 9 0 0 46: 46 2 9 1 1 47: 47 5 10 1 0 48: 48 6 9 2 2 49: 49 10 3 0 0 50: 50 6 5 2 4 51: 51 3 5 0 0 52: 52 9 6 2 3 53: 53 10 8 0 0 54: 54 10 3 1 0 55: 55 4 3 0 0 56: 56 8 9 0 0 57: 57 
6 9 3 3 58: 58 7 9 0 0 59: 59 2 9 2 1 60: 60 6 10 1 1 61: 61 6 3 0 0 62: 62 10 2 0 1 63: 63 1 10 3 4 64: 64 2 9 3 1 65: 65 7 6 0 0 66: 66 10 2 1 1 67: 67 7 4 1 1 68: 68 6 7 0 1 69: 69 8 4 1 1 70: 70 1 2 0 0 71: 71 8 4 2 1 72: 72 2 4 0 1 73: 73 10 5 0 2 74: 74 3 5 1 0 75: 75 6 4 0 0 76: 76 1 10 4 4 77: 77 2 1 0 1 78: 78 10 7 0 0 79: 79 1 2 1 1 80: 80 9 10 0 0 81: 81 7 3 0 0 82: 82 6 9 4 3 83: 83 8 7 0 0 84: 84 3 9 1 0 85: 85 7 3 1 0 86: 86 4 8 1 3 87: 87 2 7 0 1 88: 88 6 4 1 0 89: 89 3 2 1 0 90: 90 4 7 1 2 91: 91 8 4 3 2 92: 92 2 6 0 0 93: 93 3 8 4 1 94: 94 4 3 1 0 95: 95 8 2 0 0 96: 96 3 10 0 2 97: 97 9 8 0 1 98: 98 10 7 1 0 99: 99 6 2 0 1 100: 100 10 1 4 5 eventID sender target inertia reciprocity > > > > cleanEx() > nameEx("simulate_rem_seq") > ### * simulate_rem_seq > > flush(stderr()); flush(stdout()) > > ### Name: simulate_rem_seq > ### Title: Simulate a Random One-Mode Relational Event Sequence > ### Aliases: simulate_rem_seq > > ### ** Examples > > #Creating a random relational sequence with 5 actors and 25 events > rem1<- simulate_rem_seq(n_actors = 25, + n_events = 1000, + inertia = TRUE, + inertia_p = 0.12, + recip = TRUE, + recip_p = 0.08, + sender_outdegree = TRUE, + sender_outdegree_p = 0.09, + target_indegree = TRUE, + target_indegree_p = 0.05, + assort = TRUE, + assort_p = -0.01, + trans_trips = TRUE, + trans_trips_p = 0.09, + three_cycles = TRUE, + three_cycles_p = 0.04, + starting_events = NULL, + returnStats = TRUE) > rem1 eventID sender target inertia reciprocity sender_outdegree 1: 1 4 25 0 0 0 2: 2 17 12 0 0 0 3: 3 6 13 0 0 0 4: 4 18 12 0 0 0 5: 5 13 8 0 0 0 --- 996: 996 15 19 11 13 65 997: 997 5 19 10 11 63 998: 998 7 19 14 10 73 999: 999 15 19 12 13 66 1000: 1000 19 9 6 8 65 target_indegree assort trans_trip three_cycles 1: 0 0 0 0 2: 0 0 0 0 3: 0 0 0 0 4: 1 0 0 0 5: 0 0 0 0 --- 996: 60 3900 351 322 997: 61 3843 338 324 998: 62 4526 418 297 999: 63 4158 372 322 1000: 52 3380 348 226 > > #Creating a random relational sequence with 100 actors and 1000 events with > #only inertia and reciprocity > rem2 <- simulate_rem_seq(n_actors = 100, + n_events = 1000, + inertia = TRUE, + inertia_p = 0.12, + recip = TRUE, + recip_p = 0.08, + returnStats = TRUE) > rem2 eventID sender target inertia reciprocity 1: 1 74 37 0 0 2: 2 67 4 0 0 3: 3 81 6 0 0 4: 4 21 78 0 0 5: 5 69 96 0 0 --- 996: 996 31 70 0 0 997: 997 32 89 0 0 998: 998 64 37 0 1 999: 999 26 24 0 1 1000: 1000 67 26 0 1 > > #Creating a random relational sequence based on the starting sequence with > #only inertia and reciprocity > rem3 <- simulate_rem_seq(n_actors = 100, #does not matter can be any value, this is + #overridden by the starting event sequence + n_events = 100, + inertia = TRUE, + inertia_p = 0.12, + recip = TRUE, + recip_p = 0.08, + #a random starting event sequence + starting_events = matrix(c(1:10, 10:1), + nrow = 10, ncol = 2, byrow = FALSE), + returnStats = TRUE) > rem3 eventID sender target inertia reciprocity 1: 1 1 10 0 0 2: 2 2 9 0 0 3: 3 3 8 0 0 4: 4 4 7 0 0 5: 5 5 6 0 0 6: 6 6 5 0 1 7: 7 7 4 0 1 8: 8 8 3 0 1 9: 9 9 2 0 1 10: 10 10 1 0 1 11: 11 9 7 0 0 12: 12 7 6 0 0 13: 13 9 1 0 0 14: 14 2 7 0 0 15: 15 7 10 0 0 16: 16 9 5 0 0 17: 17 7 2 0 1 18: 18 8 4 0 0 19: 19 1 2 0 0 20: 20 3 1 0 0 21: 21 8 9 0 0 22: 22 2 7 1 1 23: 23 8 4 1 0 24: 24 5 3 0 0 25: 25 4 2 0 0 26: 26 10 4 0 0 27: 27 10 3 0 0 28: 28 8 3 1 1 29: 29 9 3 0 0 30: 30 4 7 1 1 31: 31 3 1 1 0 32: 32 10 9 0 0 33: 33 5 1 0 0 34: 34 7 8 0 0 35: 35 1 2 1 0 36: 36 5 3 1 0 37: 37 8 9 1 0 38: 38 3 8 1 2 39: 39 6 7 0 1 40: 40 7 10 1 0 41: 41 9 5 1 0 
42: 42 1 8 0 0 43: 43 9 1 1 0 44: 44 3 8 2 2 45: 45 2 8 0 0 46: 46 1 2 2 0 47: 47 7 8 1 0 48: 48 8 6 0 0 49: 49 10 6 0 0 50: 50 6 1 0 0 51: 51 7 5 0 0 52: 52 10 2 0 0 53: 53 1 7 0 0 54: 54 6 7 1 1 55: 55 10 4 1 0 56: 56 1 2 3 0 57: 57 1 7 1 0 58: 58 9 7 1 0 59: 59 4 2 1 0 60: 60 5 7 0 1 61: 61 1 10 1 1 62: 62 5 10 0 0 63: 63 2 7 2 1 64: 64 10 7 0 2 65: 65 3 6 0 0 66: 66 2 9 1 1 67: 67 8 7 0 2 68: 68 8 5 0 0 69: 69 10 2 1 0 70: 70 2 9 2 1 71: 71 8 7 1 2 72: 72 1 2 4 0 73: 73 3 8 3 2 74: 74 3 2 0 0 75: 75 3 1 2 0 76: 76 10 3 1 0 77: 77 6 4 0 0 78: 78 9 8 0 2 79: 79 4 6 0 1 80: 80 1 9 0 2 81: 81 3 2 1 0 82: 82 5 8 0 1 83: 83 7 5 1 1 84: 84 3 8 4 2 85: 85 10 1 1 2 86: 86 5 10 1 0 87: 87 2 7 3 1 88: 88 7 4 1 2 89: 89 3 1 3 0 90: 90 2 3 0 2 91: 91 10 9 1 0 92: 92 1 10 2 2 93: 93 8 5 1 1 94: 94 9 3 1 0 95: 95 5 6 1 1 96: 96 3 10 0 2 97: 97 2 7 4 1 98: 98 7 3 0 0 99: 99 8 7 2 2 100: 100 4 5 0 0 eventID sender target inertia reciprocity > > > > ### *
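A minimal sketch of the starting_events matrix supplied to rem3 in both simulators above: the first ten simulated events reproduce its rows one for one (senders 1 through 10 paired with targets 10 through 1), so its two columns appear to be read as sender and target. The object name and column labels below are illustrative only, not taken from the package:

seed_events <- matrix(c(1:10, 10:1), nrow = 10, ncol = 2, byrow = FALSE)
colnames(seed_events) <- c("sender", "target") # assumed labels, added for readability
seed_events[1:3, ]                             # pairs (1,10), (2,9), (3,8), matching events 1-3 of rem3 above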