From 8aa15cceb221397c988e6dbdf15636c42f48c21e Mon Sep 17 00:00:00 2001 From: mishoryu <22555149+mishoryu@users.noreply.github.com> Date: Sat, 23 Nov 2019 18:51:14 +1100 Subject: [PATCH] CRAN version (#28) * Updated CRAN notes, README, Rd files * Updated pkgdown site --- DESCRIPTION | 2 +- R/AddUserData.R | 3 +- README.md | 6 +- cran-comments.md | 4 +- docs/index.html | 262 +++++++++--------- docs/pkgdown.yml | 2 +- docs/reference/Authenticate.twitter.html | 11 +- docs/reference/Collect.reddit.html | 3 +- docs/reference/Collect.twitter.html | 23 +- docs/reference/Collect.youtube.html | 10 +- docs/reference/Create.semantic.twitter.html | 13 +- docs/reference/Create.twomode.twitter.html | 3 +- ...SML-colon-colon-AddText.actor.youtube.html | 3 +- ...colon-colon-AddUserData.actor.twitter.html | 13 +- ...olon-colon-AddVideoData.actor.youtube.html | 9 +- man/Authenticate.twitter.Rd | 11 +- man/Collect.reddit.Rd | 3 +- man/Collect.twitter.Rd | 28 +- man/Collect.youtube.Rd | 10 +- man/Create.semantic.twitter.Rd | 13 +- man/Create.twomode.twitter.Rd | 3 +- ...onSML-colon-colon-AddText.actor.youtube.Rd | 3 +- ...L-colon-colon-AddUserData.actor.twitter.Rd | 13 +- ...-colon-colon-AddVideoData.actor.youtube.Rd | 9 +- 24 files changed, 267 insertions(+), 193 deletions(-) diff --git a/DESCRIPTION b/DESCRIPTION index e986017..7990e86 100644 --- a/DESCRIPTION +++ b/DESCRIPTION @@ -13,7 +13,7 @@ Encoding: UTF-8 Author: Timothy Graham, Robert Ackland, Chung-hong Chan, Bryan Gertzel Maintainer: Bryan Gertzel License: GPL (>= 3) -RoxygenNote: 6.1.1 +RoxygenNote: 7.0.1 NeedsCompilation: no URL: https://github.com/vosonlab/vosonSML BugReports: https://github.com/vosonlab/vosonSML/issues diff --git a/R/AddUserData.R b/R/AddUserData.R index e89581e..50d9683 100644 --- a/R/AddUserData.R +++ b/R/AddUserData.R @@ -58,7 +58,8 @@ AddUserData.actor.default <- function(net, ...) 
{ #' @examples #' \dontrun{ #' # add user info to a twitter actor network -#' actorNetwork <- collectData %>% Create("actor") %>% AddUserData(collectData, twitterAuth = myTwitterAuth) +#' actorNetwork <- collectData %>% +#' Create("actor") %>% AddUserData(collectData, twitterAuth = myTwitterAuth) #' #' # network #' # actorNetwork$nodes diff --git a/README.md b/README.md index 1771570..ef221b7 100644 --- a/README.md +++ b/README.md @@ -20,14 +20,14 @@ Unfortunately we are no longer able to maintain `facebook` and `instagram` colle ## Installation -Install the latest release via CRAN (v0.27.2): +Install the latest release via CRAN (v0.29.4): ```R install.packages("vosonSML") ``` -Install the latest release via GitHub (v0.29.3): +Install the latest release via GitHub (v0.29.4): ```R -install.packages("https://github.com/vosonlab/vosonSML/releases/download/v0.29.3/vosonSML-0.29.3.tar.gz", +install.packages("https://github.com/vosonlab/vosonSML/releases/download/v0.29.4/vosonSML-0.29.4.tar.gz", repo = NULL, type = "source") ``` diff --git a/cran-comments.md b/cran-comments.md index 26be036..54e5b31 100644 --- a/cran-comments.md +++ b/cran-comments.md @@ -1,7 +1,7 @@ ## Test environments * local MacOS X, R 3.6.0 -* local Windows 10, R 3.6.0 -* R-Devel r76784 Windows (Winbuilder) +* local Windows 10, R 3.6.1 +* R-Devel r77446 Windows (Winbuilder) ## R CMD check results 0 errors | 0 warnings diff --git a/docs/index.html b/docs/index.html index b5c5d55..a5ea187 100644 --- a/docs/index.html +++ b/docs/index.html @@ -96,14 +96,14 @@

Installation

-

Install the latest release via CRAN (v0.27.2):

-
install.packages("vosonSML")
-

Install the latest release via GitHub (v0.29.3):

-
install.packages("https://github.com/vosonlab/vosonSML/releases/download/v0.29.3/vosonSML-0.29.3.tar.gz", 
-  repo = NULL, type = "source")
+

Install the latest release via CRAN (v0.29.4):

+
install.packages("vosonSML")
+

Install the latest release via GitHub (v0.29.4):

+
install.packages("https://github.com/vosonlab/vosonSML/releases/download/v0.29.4/vosonSML-0.29.4.tar.gz", 
+  repo = NULL, type = "source")

Install the latest development version (v0.29.4):

- +
# library(devtools)
+devtools::install_github("vosonlab/vosonSML")

@@ -125,60 +125,60 @@

‘Authenticate’ with the Twitter API

- +
library(magrittr)
+library(vosonSML)
+
+myKeys <- list(appName = "My App", apiKey = "xxxxxxxxxxxx", apiSecret = "xxxxxxxxxxxx", 
+               accessToken = "xxxxxxxxxxxx", accessTokenSecret = "xxxxxxxxxxxx")
+  
+twitterAuth <- Authenticate("twitter", appName = myKeys$appName, apiKey = myKeys$apiKey, 
+                            apiSecret = myKeys$apiSecret, accessToken = myKeys$accessToken,
+                            accessTokenSecret = myKeys$accessTokenSecret)
+
+# twitter authentication creates an access token as part of the auth object
+# this can and should be re-used by saving it and then loading it for future sessions
+# save the auth object after authenticate 
+saveRDS(twitterAuth, file = "~/.twitter_auth")
+
+# load a previously saved auth object for use in collect
+twitterAuth <- readRDS("~/.twitter_auth")

‘Collect’ tweets for the ‘#auspol’ hashtag

- +
# collect 100 recent tweets
+twitterData <- twitterAuth %>%
+               Collect(searchTerm = "#auspol", searchType = "recent", numTweets = 100, 
+                       includeRetweets = FALSE, retryOnRateLimit = TRUE, writeToFile = TRUE, 
+                       verbose = TRUE)

‘Create’ twitter ‘activity’, ‘actor’, ‘semantic’ and ‘twomode’ network graphs

- +
## activity network - nodes are tweets
+
+activityNetwork <- twitterData %>% Create("activity")
+activityGraph <- activityNetwork %>% Graph() # igraph network graph
+
+## actor network - nodes are users who have tweeted
+
+actorNetwork <- twitterData %>% Create("actor")
+actorGraph <- actorNetwork %>% Graph() # igraph network graph
+
+## semantic network - relationships between concepts - nodes are common terms, hashtags
+## and actors
+
+remItems <- c("#auspol", "auspol") # exclude these terms
+topTerms <- 5                      # include only the top 5% most frequent terms as nodes
+semanticNetwork <- twitterData %>% Create("semantic", removeTermsOrHashtags = remItems, 
+                                          termFreq = topTerms)
+semanticGraph <- semanticNetwork %>% Graph(writeToFile = TRUE, directed = FALSE)
+
+## twomode network - nodes are actors and hashtags
+
+remItems <- c("#auspol") # exclude these hashtags
+twomodeNetwork <- twitterData %>% Create("twomode", removeTermsOrHashtags = remItems)
+twomodeGraph <- twomodeNetwork %>% Graph(writeToFile = TRUE)

@@ -187,28 +187,28 @@

‘Authenticate’, ‘Collect’ and ‘Create’ network graphs from youtube video comments

- +
library(magrittr)
+library(vosonSML)
+
+myYoutubeAPIKey <- "xxxxxxxxxxxxxx"
+
+# helper to create a list of youtube video ids from urls
+myYoutubeVideoIds <- GetYoutubeVideoIDs(c("https://www.youtube.com/watch?v=xxxxxxxx",
+                                          "https://youtu.be/xxxxxxxx"))
+
+# authenticate and collect 100 top-level comments per youtube video in list
+# also collects reply-comments for each top-level comment
+youtubeData <- Authenticate("youtube", apiKey = myYoutubeAPIKey) %>%
+               Collect(videoIDs = myYoutubeVideoIds, maxComments = 100)
+
+## activity network - nodes are comments and videos
+
+activityNetwork <- youtubeData %>% Create("activity") %>% AddText(youtubeData)
+activityGraph <- activityNetwork %>% Graph()
+
+## actor network - nodes are users who have posted comments
+
+actorGraph <- youtubeData %>% Create("actor") %>% AddText(youtubeData) %>% Graph()

@@ -217,25 +217,25 @@

‘Collect’ and ‘Create’ reddit networks from a subreddit thread

- +
library(magrittr)
+library(vosonSML)
+
+# collect reddit comment threads
+myThreadUrls <- c("https://www.reddit.com/r/xxxxxx/comments/xxxxxx/x_xxxx_xxxxxxxxx/")
+
+# authentication does not require credentials
+redditData <- Authenticate("reddit") %>%
+              Collect(threadUrls = myThreadUrls, waitTime = 5)
+              
+## activity network - nodes are comments and initial thread posts
+
+activityNetwork <- redditData %>% Create("activity")
+activityGraph <- activityNetwork %>% Graph(writeToFile = TRUE)
+
+## actor network - nodes are users who have posted comments
+
+# create an actor network with comment text as edge attribute
+actorGraph <- redditData %>% Create("actor") %>% AddText(redditData) %>% Graph()

@@ -244,56 +244,56 @@

‘AddText’ adds collected text data to networks as node or edge attributes

- +
# applies to twitter, youtube and reddit - activity and actor networks
+
+# graph for activity network with text data added as node attribute
+activityNetworkGraph <- twitterData %>% Create("activity") %>% AddText(twitterData) %>%
+                        Graph()
+                        
+# AddText will also redirect some edges in a youtube actor network by finding user
+# references at the beginning of reply comments text
+# i.e. a reply comment from user_B to top-level comment by user_A
+#     user_B: "@user_C A very fine point!"
+# this would typically create an edge between user_B -> user_A, however the parameter
+# 'replies_from_text' redirects this edge instead from user_B -> user_C as per the
+# reference in the comment text - set this to 'FALSE' to ignore comment references
+
+actorNetworkGraph <- youtubeData %>% Create("actor") %>% 
+                     AddText(youtubeData, replies_from_text = TRUE) %>% Graph()

‘AddUserData’ requests and adds user profile data to networks

- +
# applies only to twitter actor networks
+
+# add additional twitter user profile info to actor network graph as node attributes
+actorGraphWithUserAttr <- actorNetwork %>% 
+                          AddUserData(twitterData, 
+                                      lookupUsers = TRUE,
+                                      twitterAuth = twitterAuth) %>% Graph()

‘AddVideoData’ requests and adds video data to networks

- +
# applies only to youtube actor networks
+
+# replaces 'VIDEOID:xxxxxx' references in actor network with their publisher's
+# user id (channel ID) and adds additional collected youtube video info to actor
+# network graph as node attributes
+
+# if only want the video id substitution use the 'actorSubOnly = TRUE' parameter
+actorGraphWithVideos <- actorNetwork %>% AddVideoData(youtubeAuth,
+                                                      actorSubOnly = FALSE) %>% Graph()

Save and Load Authentication Objects

Save and reuse twitter and youtube authentication objects in future sessions.

- +
# save the object after 'Authenticate' 
+saveRDS(myYoutubeAuth, file = "~/.youtube_auth")
+
+# load a previously saved authentication object for use in 'Collect'
+myYoutubeAuth <- readRDS("~/.youtube_auth")

For more detailed function information and examples, please refer to the Reference page.

diff --git a/docs/pkgdown.yml b/docs/pkgdown.yml index 36d5f21..e9c4f4e 100644 --- a/docs/pkgdown.yml +++ b/docs/pkgdown.yml @@ -1,4 +1,4 @@ -pandoc: '2.6' +pandoc: 2.7.3 pkgdown: 1.4.1 pkgdown_sha: ~ articles: [] diff --git a/docs/reference/Authenticate.twitter.html b/docs/reference/Authenticate.twitter.html index 9b3ee1d..bffddda 100644 --- a/docs/reference/Authenticate.twitter.html +++ b/docs/reference/Authenticate.twitter.html @@ -132,8 +132,15 @@

Twitter API authentication

# S3 method for twitter
-Authenticate(socialmedia, appName, apiKey, apiSecret,
-  accessToken, accessTokenSecret, ...)
+Authenticate( + socialmedia, + appName, + apiKey, + apiSecret, + accessToken, + accessTokenSecret, + ... +)

Arguments

diff --git a/docs/reference/Collect.reddit.html b/docs/reference/Collect.reddit.html index 314ed17..14cf7d4 100644 --- a/docs/reference/Collect.reddit.html +++ b/docs/reference/Collect.reddit.html @@ -130,8 +130,7 @@

Collect comments data from reddit threads

# S3 method for reddit
-Collect(credential, threadUrls, waitTime = 5,
-  writeToFile = FALSE, ...)
+Collect(credential, threadUrls, waitTime=5, writeToFile=FALSE, ...)

Arguments

diff --git a/docs/reference/Collect.twitter.html b/docs/reference/Collect.twitter.html index ae33196..de22d4c 100644 --- a/docs/reference/Collect.twitter.html +++ b/docs/reference/Collect.twitter.html @@ -152,10 +152,17 @@

Collect tweet data from twitter search

# S3 method for twitter
-Collect(credential, searchTerm = "",
-  searchType = "recent", numTweets = 100, includeRetweets = TRUE,
-  retryOnRateLimit = FALSE, writeToFile = FALSE, verbose = FALSE,
-  ...)
+Collect( + credential, + searchTerm="", + searchType="recent", + numTweets=100, + includeRetweets=TRUE, + retryOnRateLimit=FALSE, + writeToFile=FALSE, + verbose=FALSE, + ... +)

Arguments

@@ -197,10 +204,10 @@

Arg

...

Arguments passed on to rtweet::search_tweets

-
geocode

Geographical limiter of the template +

geocode

Geographical limiter of the template "latitude,longitude,radius" e.g., geocode = "37.78,-122.40,1mi".

-
max_id

Character, returns results with an ID less +

max_id

Character, returns results with an ID less than (that is, older than) or equal to `max_id`. Especially useful for large data returns that require multiple iterations interrupted by user time constraints. For searches exceeding @@ -213,7 +220,7 @@

Arg leverage retryonratelimit for sets of tweets and max_id to allow results to continue where previous efforts left off.

-
parse

Logical, indicating whether to return parsed +

parse

Logical, indicating whether to return parsed data.frame, if true, or nested list, if false. By default, parse = TRUE saves users from the wreck of time and frustration associated with disentangling the nasty nested list @@ -226,7 +233,7 @@

Arg Twitter. However, users may occasionally encounter new or omitted variables. In these rare cases, the nested list object will be the only way to access these variables.

- +
diff --git a/docs/reference/Collect.youtube.html b/docs/reference/Collect.youtube.html index ad13610..26c3254 100644 --- a/docs/reference/Collect.youtube.html +++ b/docs/reference/Collect.youtube.html @@ -146,8 +146,14 @@

Collect comments data for youtube videos

# S3 method for youtube
-Collect(credential, videoIDs, verbose = FALSE,
-  writeToFile = FALSE, maxComments = 1e+13, ...)
+Collect( + credential, + videoIDs, + verbose = FALSE, + writeToFile = FALSE, + maxComments = 1e+13, + ... +)

Arguments

diff --git a/docs/reference/Create.semantic.twitter.html b/docs/reference/Create.semantic.twitter.html index 964edb5..200b5a0 100644 --- a/docs/reference/Create.semantic.twitter.html +++ b/docs/reference/Create.semantic.twitter.html @@ -134,9 +134,16 @@

Create twitter semantic network

# S3 method for semantic.twitter
-Create(datasource, type,
-  removeTermsOrHashtags = NULL, stopwordsEnglish = TRUE,
-  termFreq = 5, hashtagFreq = 50, verbose = FALSE, ...)
+Create( + datasource, + type, + removeTermsOrHashtags=NULL, + stopwordsEnglish=TRUE, + termFreq=5, + hashtagFreq=50, + verbose=FALSE, + ... +)

Arguments

diff --git a/docs/reference/Create.twomode.twitter.html b/docs/reference/Create.twomode.twitter.html index 81c0574..16e62c1 100644 --- a/docs/reference/Create.twomode.twitter.html +++ b/docs/reference/Create.twomode.twitter.html @@ -134,8 +134,7 @@

Create twitter twomode network

# S3 method for twomode.twitter
-Create(datasource, type,
-  removeTermsOrHashtags = NULL, verbose = FALSE, ...)
+Create(datasource, type, removeTermsOrHashtags=NULL, verbose=FALSE, ...)

Arguments

diff --git a/docs/reference/vosonSML-colon-colon-AddText.actor.youtube.html b/docs/reference/vosonSML-colon-colon-AddText.actor.youtube.html index fea2eea..3de9b72 100644 --- a/docs/reference/vosonSML-colon-colon-AddText.actor.youtube.html +++ b/docs/reference/vosonSML-colon-colon-AddText.actor.youtube.html @@ -132,8 +132,7 @@

Add columns containing text data to youtube actor network dataframes

# S3 method for actor.youtube
-AddText(net, data, replies_from_text = FALSE,
-  at_replies_only = TRUE, ...)
+AddText(net, data, replies_from_text=FALSE, at_replies_only=TRUE, ...)

Arguments

diff --git a/docs/reference/vosonSML-colon-colon-AddUserData.actor.twitter.html b/docs/reference/vosonSML-colon-colon-AddUserData.actor.twitter.html index 9aa81ca..743c630 100644 --- a/docs/reference/vosonSML-colon-colon-AddUserData.actor.twitter.html +++ b/docs/reference/vosonSML-colon-colon-AddUserData.actor.twitter.html @@ -128,8 +128,14 @@

Supplement twitter actor network by adding user profile attributes to nodes<
# S3 method for actor.twitter
-AddUserData(net, data, lookupUsers = TRUE,
-  twitterAuth = NULL, verbose = TRUE, ...)
+AddUserData( + net, + data, + lookupUsers = TRUE, + twitterAuth = NULL, + verbose = TRUE, + ... +)

Arguments

@@ -174,7 +180,8 @@

Note

Examples

if (FALSE) { # add user info to a twitter actor network -actorNetwork <- collectData %>% Create("actor") %>% AddUserData(collectData, twitterAuth = myTwitterAuth) +actorNetwork <- collectData %>% + Create("actor") %>% AddUserData(collectData, twitterAuth = myTwitterAuth) # network # actorNetwork$nodes diff --git a/docs/reference/vosonSML-colon-colon-AddVideoData.actor.youtube.html b/docs/reference/vosonSML-colon-colon-AddVideoData.actor.youtube.html index 9ae93ac..25d3a18 100644 --- a/docs/reference/vosonSML-colon-colon-AddVideoData.actor.youtube.html +++ b/docs/reference/vosonSML-colon-colon-AddVideoData.actor.youtube.html @@ -134,8 +134,13 @@

Add video information to youtube actor network dataframes

# S3 method for actor.youtube
-AddVideoData(net, youtubeAuth = NULL,
-  videoIds = NULL, actorSubOnly = FALSE, ...)
+AddVideoData( + net, + youtubeAuth = NULL, + videoIds = NULL, + actorSubOnly = FALSE, + ... +)

Arguments

diff --git a/man/Authenticate.twitter.Rd b/man/Authenticate.twitter.Rd index 651aa5e..8961044 100644 --- a/man/Authenticate.twitter.Rd +++ b/man/Authenticate.twitter.Rd @@ -4,8 +4,15 @@ \alias{Authenticate.twitter} \title{Twitter API authentication} \usage{ -\method{Authenticate}{twitter}(socialmedia, appName, apiKey, apiSecret, - accessToken, accessTokenSecret, ...) +\method{Authenticate}{twitter}( + socialmedia, + appName, + apiKey, + apiSecret, + accessToken, + accessTokenSecret, + ... +) } \arguments{ \item{socialmedia}{Character string. Identifier for social media API to authenticate, set to \code{"twitter"}.} diff --git a/man/Collect.reddit.Rd b/man/Collect.reddit.Rd index fa32ae4..f63c19a 100644 --- a/man/Collect.reddit.Rd +++ b/man/Collect.reddit.Rd @@ -4,8 +4,7 @@ \alias{Collect.reddit} \title{Collect comments data from reddit threads} \usage{ -\method{Collect}{reddit}(credential, threadUrls, waitTime = 5, - writeToFile = FALSE, ...) +\method{Collect}{reddit}(credential, threadUrls, waitTime = 5, writeToFile = FALSE, ...) } \arguments{ \item{credential}{A \code{credential} object generated from \code{Authenticate} with class name \code{"reddit"}.} diff --git a/man/Collect.twitter.Rd b/man/Collect.twitter.Rd index bb3abcd..50b6ec8 100644 --- a/man/Collect.twitter.Rd +++ b/man/Collect.twitter.Rd @@ -4,10 +4,17 @@ \alias{Collect.twitter} \title{Collect tweet data from twitter search} \usage{ -\method{Collect}{twitter}(credential, searchTerm = "", - searchType = "recent", numTweets = 100, includeRetweets = TRUE, - retryOnRateLimit = FALSE, writeToFile = FALSE, verbose = FALSE, - ...) +\method{Collect}{twitter}( + credential, + searchTerm = "", + searchType = "recent", + numTweets = 100, + includeRetweets = TRUE, + retryOnRateLimit = FALSE, + writeToFile = FALSE, + verbose = FALSE, + ... 
+) } \arguments{ \item{credential}{A \code{credential} object generated from \code{Authenticate} with class name \code{"twitter"}.} @@ -28,12 +35,13 @@ the hashtag \code{"#auspol"}.} \item{verbose}{Logical. Output additional information about the data collection. Default is \code{FALSE}.} -\item{...}{Arguments passed on to \code{rtweet::search_tweets} -\describe{ - \item{geocode}{Geographical limiter of the template +\item{...}{ + Arguments passed on to \code{\link[rtweet:search_tweets]{rtweet::search_tweets}} + \describe{ + \item{\code{geocode}}{Geographical limiter of the template "latitude,longitude,radius" e.g., \code{geocode = "37.78,-122.40,1mi"}.} - \item{max_id}{Character, returns results with an ID less + \item{\code{max_id}}{Character, returns results with an ID less than (that is, older than) or equal to `max_id`. Especially useful for large data returns that require multiple iterations interrupted by user time constraints. For searches exceeding @@ -46,7 +54,7 @@ multiple days. In these cases, it would likely be useful to leverage \code{retryonratelimit} for sets of tweets and \code{max_id} to allow results to continue where previous efforts left off.} - \item{parse}{Logical, indicating whether to return parsed + \item{\code{parse}}{Logical, indicating whether to return parsed data.frame, if true, or nested list, if false. By default, \code{parse = TRUE} saves users from the wreck of time and frustration associated with disentangling the nasty nested list @@ -59,7 +67,7 @@ returns nearly all bits of information returned from Twitter. However, users may occasionally encounter new or omitted variables. In these rare cases, the nested list object will be the only way to access these variables.} -}} + }} } \value{ A data.frame object with class names \code{"datasource"} and \code{"twitter"}. 
diff --git a/man/Collect.youtube.Rd b/man/Collect.youtube.Rd index 3c8fad4..ecfc4ea 100644 --- a/man/Collect.youtube.Rd +++ b/man/Collect.youtube.Rd @@ -4,8 +4,14 @@ \alias{Collect.youtube} \title{Collect comments data for youtube videos} \usage{ -\method{Collect}{youtube}(credential, videoIDs, verbose = FALSE, - writeToFile = FALSE, maxComments = 1e+13, ...) +\method{Collect}{youtube}( + credential, + videoIDs, + verbose = FALSE, + writeToFile = FALSE, + maxComments = 1e+13, + ... +) } \arguments{ \item{credential}{A \code{credential} object generated from \code{Authenticate} with class name \code{"youtube"}.} diff --git a/man/Create.semantic.twitter.Rd b/man/Create.semantic.twitter.Rd index b99b053..458fec7 100644 --- a/man/Create.semantic.twitter.Rd +++ b/man/Create.semantic.twitter.Rd @@ -4,9 +4,16 @@ \alias{Create.semantic.twitter} \title{Create twitter semantic network} \usage{ -\method{Create}{semantic.twitter}(datasource, type, - removeTermsOrHashtags = NULL, stopwordsEnglish = TRUE, - termFreq = 5, hashtagFreq = 50, verbose = FALSE, ...) +\method{Create}{semantic.twitter}( + datasource, + type, + removeTermsOrHashtags = NULL, + stopwordsEnglish = TRUE, + termFreq = 5, + hashtagFreq = 50, + verbose = FALSE, + ... +) } \arguments{ \item{datasource}{Collected social media data with \code{"datasource"} and \code{"twitter"} class names.} diff --git a/man/Create.twomode.twitter.Rd b/man/Create.twomode.twitter.Rd index 6f9d095..c81300f 100644 --- a/man/Create.twomode.twitter.Rd +++ b/man/Create.twomode.twitter.Rd @@ -4,8 +4,7 @@ \alias{Create.twomode.twitter} \title{Create twitter twomode network} \usage{ -\method{Create}{twomode.twitter}(datasource, type, - removeTermsOrHashtags = NULL, verbose = FALSE, ...) +\method{Create}{twomode.twitter}(datasource, type, removeTermsOrHashtags = NULL, verbose = FALSE, ...) 
} \arguments{ \item{datasource}{Collected social media data with \code{"datasource"} and \code{"twitter"} class names.} diff --git a/man/vosonSML-colon-colon-AddText.actor.youtube.Rd b/man/vosonSML-colon-colon-AddText.actor.youtube.Rd index 7274f13..7ffd346 100644 --- a/man/vosonSML-colon-colon-AddText.actor.youtube.Rd +++ b/man/vosonSML-colon-colon-AddText.actor.youtube.Rd @@ -5,8 +5,7 @@ \alias{AddText.actor.youtube} \title{Add columns containing text data to youtube actor network dataframes} \usage{ -\method{AddText}{actor.youtube}(net, data, replies_from_text = FALSE, - at_replies_only = TRUE, ...) +\method{AddText}{actor.youtube}(net, data, replies_from_text = FALSE, at_replies_only = TRUE, ...) } \arguments{ \item{net}{A named list of dataframes \code{nodes} and \code{edges} generated by \code{Create}.} diff --git a/man/vosonSML-colon-colon-AddUserData.actor.twitter.Rd b/man/vosonSML-colon-colon-AddUserData.actor.twitter.Rd index 14248e6..672d89f 100644 --- a/man/vosonSML-colon-colon-AddUserData.actor.twitter.Rd +++ b/man/vosonSML-colon-colon-AddUserData.actor.twitter.Rd @@ -5,8 +5,14 @@ \alias{AddUserData.actor.twitter} \title{Supplement twitter actor network by adding user profile attributes to nodes} \usage{ -\method{AddUserData}{actor.twitter}(net, data, lookupUsers = TRUE, - twitterAuth = NULL, verbose = TRUE, ...) +\method{AddUserData}{actor.twitter}( + net, + data, + lookupUsers = TRUE, + twitterAuth = NULL, + verbose = TRUE, + ... +) } \arguments{ \item{net}{A named list of dataframes \code{nodes} and \code{edges} generated by \code{Create}.} @@ -37,7 +43,8 @@ before hitting the rate limit. 
It does not wait and retry upon hitting rate limi \examples{ \dontrun{ # add user info to a twitter actor network -actorNetwork <- collectData \%>\% Create("actor") \%>\% AddUserData(collectData, twitterAuth = myTwitterAuth) +actorNetwork <- collectData \%>\% + Create("actor") \%>\% AddUserData(collectData, twitterAuth = myTwitterAuth) # network # actorNetwork$nodes diff --git a/man/vosonSML-colon-colon-AddVideoData.actor.youtube.Rd b/man/vosonSML-colon-colon-AddVideoData.actor.youtube.Rd index b96fec8..3534274 100644 --- a/man/vosonSML-colon-colon-AddVideoData.actor.youtube.Rd +++ b/man/vosonSML-colon-colon-AddVideoData.actor.youtube.Rd @@ -5,8 +5,13 @@ \alias{AddVideoData.actor.youtube} \title{Add video information to youtube actor network dataframes} \usage{ -\method{AddVideoData}{actor.youtube}(net, youtubeAuth = NULL, - videoIds = NULL, actorSubOnly = FALSE, ...) +\method{AddVideoData}{actor.youtube}( + net, + youtubeAuth = NULL, + videoIds = NULL, + actorSubOnly = FALSE, + ... +) } \arguments{ \item{net}{A named list of dataframes \code{nodes} and \code{edges} generated by \code{Create}.}