
Commit

Disabled collect metadata log file
This is to address issue #56.
bryn-g committed May 25, 2024
1 parent 46f44d4 commit d19d0a5
Showing 75 changed files with 136 additions and 115 deletions.
2 changes: 1 addition & 1 deletion DESCRIPTION
@@ -1,5 +1,5 @@
 Package: vosonSML
-Version: 0.34.2
+Version: 0.34.3
 Title: Collecting Social Media Data and Generating Networks for Analysis
 Description: A suite of easy to use functions for collecting social media
     data and generating networks for analysis. Supports Mastodon, YouTube,
6 changes: 6 additions & 0 deletions NEWS.md
@@ -1,3 +1,9 @@
+# vosonSML 0.34.3
+
+## Bug Fixes
+- Disabled the metadata logging that occurs when the `writeToFile` parameter of `Collect` is used. This is due to a
+  package issue that arises with R version 4.4.
+
 # vosonSML 0.34.2
 
 ## Bug Fixes
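In practical terms, collections that use `writeToFile = TRUE` still save the data to an `.rds` file; only the side-car metadata text log is no longer produced. A minimal usage sketch in R (the thread URL is a placeholder, and the `Authenticate`/`Collect` call pattern follows the package README rather than this diff):

library(vosonSML)

# Placeholder thread URL; substitute a real Reddit thread.
thread_urls <- c("https://www.reddit.com/r/rstats/comments/abc123/example_thread/")

data_reddit <- Authenticate("reddit") |>
  Collect(threadUrls = thread_urls, writeToFile = TRUE, verbose = TRUE)

# vosonSML <= 0.34.2: wrote the .rds data file plus a metadata log file.
# vosonSML 0.34.3: writes only the .rds data file.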
14 changes: 7 additions & 7 deletions R/Collect.R
@@ -17,13 +17,13 @@
 Collect <- function(credential, ...) {
   msg <- f_verbose(check_dots("verbose", ...))

-  ts_ <- Sys.time()
-  collect_log <- c(
-    paste0("collect.", credential$socialmedia),
-    paste0(format(ts_, "%a %b %d %X %Y")),
-    paste0(format(ts_, tz = "UTC", usetz = TRUE)), "",
-    paste0(names(list(...)), " = ", as.character(list(...)), collapse = "\n")
-  )
+  # ts_ <- Sys.time()
+  # collect_log <- c(
+  #   paste0("collect.", credential$socialmedia),
+  #   paste0(format(ts_, "%a %b %d %X %Y")),
+  #   paste0(format(ts_, tz = "UTC", usetz = TRUE)), "",
+  #   paste0(names(list(...)), " = ", as.character(list(...)), collapse = "\n")
+  # )

   # set the environment encoding to UTF-8 for data collection
   saved_enc <- getOption("encoding")
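For reference, the block commented out above assembled the plain-text header that the metadata log recorded: the data source, a local and a UTC timestamp, and the arguments passed to `Collect`. A standalone reconstruction with placeholder values (a sketch for illustration, not package code):

ts_ <- Sys.time()

# Placeholders standing in for credential$socialmedia and the ... arguments.
socialmedia <- "reddit"
dots <- list(threadUrls = "<thread url>", writeToFile = TRUE)

collect_log <- c(
  paste0("collect.", socialmedia),                    # e.g. "collect.reddit"
  paste0(format(ts_, "%a %b %d %X %Y")),              # local timestamp
  paste0(format(ts_, tz = "UTC", usetz = TRUE)), "",  # UTC timestamp
  paste0(names(dots), " = ", as.character(dots), collapse = "\n")
)
cat(collect_log, sep = "\n")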
3 changes: 2 additions & 1 deletion R/Collect.listing.reddit.R
@@ -121,7 +121,8 @@ Collect.listing.reddit <-

   class(listing_df) <- append(c("listing", "reddit"), class(listing_df))

-  meta_log <- c(collect_log, paste0(format(Sys.time(), "%a %b %d %X %Y")))
+  # meta_log <- c(collect_log, paste0(format(Sys.time(), "%a %b %d %X %Y")))
+  meta_log <- NULL

   if (writeToFile) write_output_file(listing_df, "rds", "RedditListing", verbose = verbose, log = meta_log)

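The same pattern repeats in every collector below: the metadata vector is replaced with `NULL` before being passed as the `log` argument of the internal writer, so no log file accompanies the saved data. `write_output_file()` itself is not part of this commit; a hypothetical sketch of that call-site contract (the function body and file naming here are assumptions, not the package's actual implementation):

# Hypothetical sketch only: writes the .rds data file and skips the metadata
# log when log = NULL, as implied by the call sites in this diff.
write_output_sketch <- function(data, type = "rds", name, verbose = FALSE, log = NULL) {
  stamp <- format(Sys.time(), "%Y-%m-%d_%H%M%S")
  data_path <- paste0(stamp, "-", name, ".", type)
  saveRDS(data, data_path)
  if (!is.null(log)) {
    writeLines(log, paste0(stamp, "-", name, ".txt"))  # metadata log, now disabled upstream
  }
  if (verbose) message("Data file written: ", data_path)
  invisible(data_path)
}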
11 changes: 6 additions & 5 deletions R/Collect.search.mastodon.R
@@ -94,11 +94,12 @@ Collect.search.mastodon <-
   }
   msg(paste0("Collected ", n_posts, " posts.\n"))

-  meta_log <- c(
-    collect_log, "",
-    ifelse(n_posts > 0, print_summary(df_summary), ""),
-    "", paste0(format(Sys.time(), "%a %b %d %X %Y"))
-  )
+  # meta_log <- c(
+  #   collect_log, "",
+  #   ifelse(n_posts > 0, print_summary(df_summary), ""),
+  #   "", paste0(format(Sys.time(), "%a %b %d %X %Y"))
+  # )
+  meta_log <- NULL

   if (writeToFile) write_output_file(df_posts, "rds", "MastodonData", verbose = verbose, log = meta_log)

11 changes: 6 additions & 5 deletions R/Collect.search.twitter.R
@@ -155,11 +155,12 @@ Collect.search.twitter <-
   }
   msg(paste0("Collected ", n_tweets, " tweets.\n"))

-  meta_log <- c(
-    collect_log, "",
-    ifelse(n_tweets > 0, print_summary(df_summary), ""),
-    "", paste0(format(Sys.time(), "%a %b %d %X %Y"))
-  )
+  # meta_log <- c(
+  #   collect_log, "",
+  #   ifelse(n_tweets > 0, print_summary(df_summary), ""),
+  #   "", paste0(format(Sys.time(), "%a %b %d %X %Y"))
+  # )
+  meta_log <- NULL

   if (writeToFile) write_output_file(df_tweets, "rds", "TwitterData", verbose = verbose, log = meta_log)

11 changes: 6 additions & 5 deletions R/Collect.thread.mastodon.R
@@ -60,11 +60,12 @@ Collect.thread.mastodon <-
   }
   msg(paste0("Collected ", n_posts, " posts.\n"))

-  meta_log <- c(
-    collect_log, "",
-    ifelse(n_posts > 0, print_summary(df_summary), ""),
-    "", paste0(format(Sys.time(), "%a %b %d %X %Y"))
-  )
+  # meta_log <- c(
+  #   collect_log, "",
+  #   ifelse(n_posts > 0, print_summary(df_summary), ""),
+  #   "", paste0(format(Sys.time(), "%a %b %d %X %Y"))
+  # )
+  meta_log <- NULL

   if (writeToFile) write_output_file(threads_df, "rds", "MastodonData", verbose = verbose, log = meta_log)

3 changes: 2 additions & 1 deletion R/Collect.thread.reddit.R
@@ -102,7 +102,8 @@ Collect.thread.reddit <-

   class(threads_df) <- append(c("datasource", "reddit"), class(threads_df))

-  meta_log <- c(collect_log, paste0(format(Sys.time(), "%a %b %d %X %Y")))
+  # meta_log <- c(collect_log, paste0(format(Sys.time(), "%a %b %d %X %Y")))
+  meta_log <- NULL

   if (writeToFile) write_output_file(threads_df, "rds", "RedditData", verbose = verbose, log = meta_log)

11 changes: 6 additions & 5 deletions R/Collect.timeline.twitter.R
@@ -160,11 +160,12 @@ Collect.timeline.twitter <-
   }
   msg(paste0("Collected ", n_tweets, " tweets.\n"))

-  meta_log <- c(
-    collect_log, "",
-    ifelse(n_tweets > 0, print_summary(df_summary), ""),
-    "", paste0(format(Sys.time(), "%a %b %d %X %Y"))
-  )
+  # meta_log <- c(
+  #   collect_log, "",
+  #   ifelse(n_tweets > 0, print_summary(df_summary), ""),
+  #   "", paste0(format(Sys.time(), "%a %b %d %X %Y"))
+  # )
+  meta_log <- NULL

   if (writeToFile) write_output_file(df_tweets, "rds", "TwitterData", verbose = verbose, log = meta_log)

3 changes: 2 additions & 1 deletion R/Collect.web.R
@@ -67,7 +67,8 @@ Collect.web <-

   class(df_results) <- append(c("datasource", "web"), class(df_results))

-  meta_log <- c(collect_log, paste0(format(Sys.time(), "%a %b %d %X %Y")))
+  # meta_log <- c(collect_log, paste0(format(Sys.time(), "%a %b %d %X %Y")))
+  meta_log <- NULL

   if (writeToFile) write_output_file(df_results, "rds", "WebData", verbose = verbose, log = meta_log)

3 changes: 2 additions & 1 deletion R/Collect.youtube.R
@@ -336,7 +336,8 @@ Collect.youtube <-

   class(dataCombined) <- append(c("datasource", "youtube"), class(dataCombined))

-  meta_log <- c(collect_log, paste0(format(Sys.time(), "%a %b %d %X %Y")))
+  # meta_log <- c(collect_log, paste0(format(Sys.time(), "%a %b %d %X %Y")))
+  meta_log <- NULL

   if (writeToFile) write_output_file(dataCombined, "rds", "YoutubeData", verbose = verbose, log = meta_log)

2 changes: 1 addition & 1 deletion README.md
@@ -4,7 +4,7 @@
 [![CRAN_Monthly](https://cranlogs.r-pkg.org/badges/vosonSML)](https://CRAN.R-project.org/package=vosonSML)
 [![CRAN_Total](https://cranlogs.r-pkg.org/badges/grand-total/vosonSML)](https://CRAN.R-project.org/package=vosonSML)
 [![Github_Release](https://img.shields.io/github/release-pre/vosonlab/vosonSML.svg?logo=github)](https://github.com/vosonlab/vosonSML/releases)
-[![Github_Dev](https://img.shields.io/static/v1?label=dev&message=v0.34.1&logo=github)](https://github.com/vosonlab/vosonSML)
+[![Github_Dev](https://img.shields.io/static/v1?label=dev&message=v0.34.3&logo=github)](https://github.com/vosonlab/vosonSML)
 [![Last_Commit](https://img.shields.io/github/last-commit/vosonlab/vosonSML.svg?&logo=github)](https://github.com/vosonlab/vosonSML/commits/master)
 [![Build_Status](https://github.com/vosonlab/vosonSML/workflows/R-CMD-check/badge.svg)](https://github.com/vosonlab/vosonSML/actions)

2 changes: 1 addition & 1 deletion docs/404.html

2 changes: 1 addition & 1 deletion docs/CODE_OF_CONDUCT.html

2 changes: 1 addition & 1 deletion docs/LICENSE-text.html

0 comments on commit d19d0a5
