Skip to content

Commit

Permalink
Fixes for S3 method problems in checks
Browse files Browse the repository at this point in the history
  • Loading branch information
AlexChristensen committed Jan 28, 2024
1 parent 977219f commit 618b093
Show file tree
Hide file tree
Showing 5 changed files with 41 additions and 56 deletions.
11 changes: 3 additions & 8 deletions DESCRIPTION
Original file line number Diff line number Diff line change
@@ -1,7 +1,7 @@
Package: transforEmotion
Title: Sentiment Analysis for Text, Image and Video using Transformer Models
Version: 0.1.5
Date: 2024-01-27
Date: 2024-01-28
Authors@R: c(person("Alexander", "Christensen", email = "[email protected]",
role = "aut", comment = c(ORCID = "0000-0002-9798-7037")),
person("Hudson", "Golino", email = "[email protected]", role = "aut",
Expand All @@ -12,13 +12,8 @@ Maintainer: Aleksandar Tomašević <[email protected]>
Description: Implements sentiment analysis using Hugging Face <https://huggingface.co> transformer zero-shot classification model pipelines for text and image data. The default text pipeline is Cross-Encoder's DistilRoBERTa <https://huggingface.co/cross-encoder/nli-distilroberta-base> and the default image/video pipeline is OpenAI's CLIP <https://huggingface.co/openai/clip-vit-base-patch32>. All other zero-shot classification model pipelines can be implemented using their model name from <https://huggingface.co/models?pipeline_tag=zero-shot-classification>.
License: GPL (>= 3.0)
Encoding: UTF-8
Imports: reticulate, pbapply, googledrive, LSAfun, dplyr, remotes, Matrix
Suggests:
markdown,
knitr,
rmarkdown,
rstudioapi,
testthat (>= 3.0.0)
Imports: dplyr, googledrive, LSAfun, Matrix, methods, pbapply, remotes, reticulate
Suggests: knitr, markdown, rmarkdown, rstudioapi, testthat (>= 3.0.0)
VignetteBuilder: knitr
RoxygenNote: 7.3.1
Config/testthat/edition: 3
58 changes: 32 additions & 26 deletions R/rag.R
Original file line number Diff line number Diff line change
Expand Up @@ -66,6 +66,12 @@
#'
#' These values depend on the number and quality of texts. Adjust as necessary
#'
#' @param device Character.
#' Whether to use CPU or GPU for inference.
#' Defaults to \code{"auto"}, which prefers
#' GPU over CPU (if a CUDA-capable GPU is set up).
#' Set to \code{"cpu"} to force inference on the CPU
#'
#' @param keep_in_env Boolean (length = 1).
#' Whether the classifier should be kept in your global environment.
#' Defaults to \code{TRUE}.
Expand Down Expand Up @@ -243,7 +249,7 @@ rag <- function(
)

}

# Clean-up response
response <- response_cleanup(
extracted_query$response, transformer = transformer
Expand All @@ -257,44 +263,44 @@ rag <- function(

}

# S3 method for 'print' on "rag" objects
# Updated 25.01.2024
# NOTE(review): the roxygen @exportS3Method tag was removed in this commit;
# confirm S3method(print, rag) is still registered in NAMESPACE
#
# x   : a "rag" object (character response returned by `rag()`)
# ... : further arguments passed from the generic (unused)
#
# Prints the stored response to the console and, per print-method
# convention, returns `x` invisibly so the object pipes/chains cleanly.
print.rag <- function(x, ...){
  cat(x)
  invisible(x)
}

# S3 method for 'summary' on "rag" objects
# Updated 25.01.2024
# NOTE(review): the roxygen @exportS3Method tag was removed in this commit;
# confirm S3method(summary, rag) is still registered in NAMESPACE
#
# object : a "rag" object (character response returned by `rag()`)
# ...    : further arguments passed from the generic (unused)
#
# Prints the stored response to the console and returns `object`
# invisibly (mirrors print.rag; the two methods are intentionally identical).
summary.rag <- function(object, ...){
  cat(object)
  invisible(object)
}

#' Clean up response
#' @noRd
# Updated 28.01.2024
#
# Post-processes the raw text returned by a local LLM so the user sees
# a tidy response. Behavior is transformer-specific:
#   - response  : raw character output from the model
#   - transformer : one of "tinyllama", "llama-2", "mistral-7b",
#                   "phi-2", "orca-2" (any other value returns NULL,
#                   since `switch` has no default branch here)
response_cleanup <- function(response, transformer){

  # Trim whitespace first!
  response <- trimws(response)

  # Return on switch
  return(
    switch(
      transformer,
      # These models need no extra cleanup beyond the trim above
      "tinyllama" = response,
      "llama-2" = response,
      # mistral-7b: drop everything after a "---" separator line, then
      # insert a newline before each run of digits (re-flows numbered lists)
      "mistral-7b" = gsub(
        "(\\d+)", "\\\n\\1",
        gsub("\n---.*", "", response),
        perl = TRUE
      ),
      # phi-2: truncate at the first blank line (model tends to ramble after)
      "phi-2" = gsub("\\\n\\\n.*", "", response),
      "orca-2" = response
    )
  )

}

#' Set up for TinyLLAMA
Expand Down Expand Up @@ -330,22 +336,22 @@ setup_llama2 <- function(llama_index, prompt, device)

# # Check for {llama-cpp-python} install
# if(!"llama-cpp-python" %in% reticulate::py_list_packages(envname = "transforEmotion")$package){
#
#
# # Get operating system
# OS <- system.check()$OS
#
#
# # Check for operating system
# if(OS == "linux"){
#
#
# # Should be good to go...
# reticulate::conda_install(
# envname = "transforEmotion",
# packages = "llama-cpp-python",
# pip = TRUE
# )
#
#
# }else{
#
#
# # Try it out...
# install_try <- try(
# reticulate::conda_install(
Expand All @@ -354,37 +360,37 @@ setup_llama2 <- function(llama_index, prompt, device)
# pip = TRUE
# ), silent = TRUE
# )
#
#
# # Catch the error
# if(is(install_try, "try-error")){
#
#
# # Send error on how to install
# if(OS == "windows"){
#
#
# stop(
# paste0(
# "{llama-cpp-python} failed installation. ",
# "Follow these instructions and try again:\n\n",
# "https://llama-cpp-python.readthedocs.io/"
# ), call. = FALSE
# )
#
#
# }else{ # Mac
#
#
# stop(
# paste0(
# "{llama-cpp-python} failed installation. ",
# "Follow these instructions and try again:\n\n",
# "https://llama-cpp-python.readthedocs.io/"
# ), call. = FALSE
# )
#
#
# }
#
#
# }
#
#
# }
#
#
# }

# Return model
Expand Down
11 changes: 0 additions & 11 deletions man/print.rag.Rd

This file was deleted.

6 changes: 6 additions & 0 deletions man/rag.Rd

Some generated files are not rendered by default. Learn more about how customized files appear on GitHub.

11 changes: 0 additions & 11 deletions man/summary.rag.Rd

This file was deleted.

0 comments on commit 618b093

Please sign in to comment.