
Commit

feat(refactor): changed variable names
PeriniM committed Apr 30, 2024
1 parent da2c82a commit 8fba7e5
Showing 13 changed files with 54 additions and 52 deletions.
12 changes: 6 additions & 6 deletions examples/gemini/json_scraper_gemini.py
@@ -1,10 +1,10 @@
"""
Basic example of scraping pipeline using SmartScraper from JSON documents
Basic example of scraping pipeline using JSONScraperGraph from JSON documents
"""

import os
from dotenv import load_dotenv
from scrapegraphai.graphs import JsonScraperGraph
from scrapegraphai.graphs import JSONScraperGraph
from scrapegraphai.utils import convert_to_csv, convert_to_json, prettify_exec_info
load_dotenv()

@@ -33,23 +33,23 @@
}

# ************************************************
-# Create the JsonScraperGraph instance and run it
+# Create the JSONScraperGraph instance and run it
# ************************************************

-smart_scraper_graph = JsonScraperGraph(
+json_scraper_graph = JSONScraperGraph(
    prompt="List me all the authors, title and genres of the books",
    source=text,  # Pass the content of the file, not the file object
    config=graph_config
)

-result = smart_scraper_graph.run()
+result = json_scraper_graph.run()
print(result)

# ************************************************
# Get graph execution info
# ************************************************

-graph_exec_info = smart_scraper_graph.get_execution_info()
+graph_exec_info = json_scraper_graph.get_execution_info()
print(prettify_exec_info(graph_exec_info))

# Save to json or csv
12 changes: 6 additions & 6 deletions examples/gemini/xml_scraper_openai.py
@@ -1,10 +1,10 @@
"""
Basic example of scraping pipeline using XmlScraperGraph from XML documents
Basic example of scraping pipeline using XMLScraperGraph from XML documents
"""

import os
from dotenv import load_dotenv
from scrapegraphai.graphs import XmlScraperGraph
from scrapegraphai.graphs import XMLScraperGraph
from scrapegraphai.utils import convert_to_csv, convert_to_json, prettify_exec_info
load_dotenv()

@@ -33,23 +33,23 @@
}

# ************************************************
-# Create the XmlScraperGraph instance and run it
+# Create the XMLScraperGraph instance and run it
# ************************************************

-smart_scraper_graph = XmlScraperGraph(
+xml_scraper_graph = XMLScraperGraph(
    prompt="List me all the authors, title and genres of the books",
    source=text,  # Pass the content of the file, not the file object
    config=graph_config
)

-result = smart_scraper_graph.run()
+result = xml_scraper_graph.run()
print(result)

# ************************************************
# Get graph execution info
# ************************************************

-graph_exec_info = smart_scraper_graph.get_execution_info()
+graph_exec_info = xml_scraper_graph.get_execution_info()
print(prettify_exec_info(graph_exec_info))

# Save to json or csv
12 changes: 6 additions & 6 deletions examples/local_models/Docker/json_scraper_docker.py
@@ -1,10 +1,10 @@
"""
Basic example of scraping pipeline using JsonScraperGraph from JSON documents
Basic example of scraping pipeline using JSONScraperGraph from JSON documents
"""

import os
from dotenv import load_dotenv
from scrapegraphai.graphs import JsonScraperGraph
from scrapegraphai.graphs import JSONScraperGraph
from scrapegraphai.utils import convert_to_csv, convert_to_json, prettify_exec_info
load_dotenv()

@@ -37,23 +37,23 @@
}

# ************************************************
-# Create the JsonScraperGraph instance and run it
+# Create the JSONScraperGraph instance and run it
# ************************************************

-smart_scraper_graph = JsonScraperGraph(
+json_scraper_graph = JSONScraperGraph(
    prompt="List me all the authors, title and genres of the books",
    source=text,  # Pass the content of the file, not the file object
    config=graph_config
)

-result = smart_scraper_graph.run()
+result = json_scraper_graph.run()
print(result)

# ************************************************
# Get graph execution info
# ************************************************

-graph_exec_info = smart_scraper_graph.get_execution_info()
+graph_exec_info = json_scraper_graph.get_execution_info()
print(prettify_exec_info(graph_exec_info))

# Save to json or csv
12 changes: 6 additions & 6 deletions examples/local_models/Docker/xml_scraper_docker.py
@@ -1,10 +1,10 @@
"""
Basic example of scraping pipeline using XmlScraperGraph from XML documents
Basic example of scraping pipeline using XMLScraperGraph from XML documents
"""

import os
from dotenv import load_dotenv
from scrapegraphai.graphs import XmlScraperGraph
from scrapegraphai.graphs import XMLScraperGraph
from scrapegraphai.utils import convert_to_csv, convert_to_json, prettify_exec_info
load_dotenv()

@@ -37,23 +37,23 @@
}

# ************************************************
-# Create the XmlScraperGraph instance and run it
+# Create the XMLScraperGraph instance and run it
# ************************************************

-smart_scraper_graph = XmlScraperGraph(
+xml_scraper_graph = XMLScraperGraph(
    prompt="List me all the authors, title and genres of the books",
    source=text,  # Pass the content of the file, not the file object
    config=graph_config
)

-result = smart_scraper_graph.run()
+result = xml_scraper_graph.run()
print(result)

# ************************************************
# Get graph execution info
# ************************************************

-graph_exec_info = smart_scraper_graph.get_execution_info()
+graph_exec_info = xml_scraper_graph.get_execution_info()
print(prettify_exec_info(graph_exec_info))

# Save to json or csv
12 changes: 6 additions & 6 deletions examples/local_models/Ollama/json_scraper_ollama.py
@@ -1,10 +1,10 @@
"""
Basic example of scraping pipeline using JsonScraperGraph from JSON documents
Basic example of scraping pipeline using JSONScraperGraph from JSON documents
"""

import os
from dotenv import load_dotenv
from scrapegraphai.graphs import JsonScraperGraph
from scrapegraphai.graphs import JSONScraperGraph
from scrapegraphai.utils import convert_to_csv, convert_to_json, prettify_exec_info
load_dotenv()

@@ -39,23 +39,23 @@
}

# ************************************************
-# Create the XmlScraperGraph instance and run it
+# Create the JSONScraperGraph instance and run it
# ************************************************

-smart_scraper_graph = JsonScraperGraph(
+json_scraper_graph = JSONScraperGraph(
    prompt="List me all the authors, title and genres of the books",
    source=text,  # Pass the content of the file, not the file object
    config=graph_config
)

-result = smart_scraper_graph.run()
+result = json_scraper_graph.run()
print(result)

# ************************************************
# Get graph execution info
# ************************************************

-graph_exec_info = smart_scraper_graph.get_execution_info()
+graph_exec_info = json_scraper_graph.get_execution_info()
print(prettify_exec_info(graph_exec_info))

# Save to json or csv
12 changes: 6 additions & 6 deletions examples/local_models/Ollama/xml_scraper_ollama.py
@@ -1,10 +1,10 @@
"""
Basic example of scraping pipeline using XmlScraperGraph from XML documents
Basic example of scraping pipeline using XMLScraperGraph from XML documents
"""

import os
from dotenv import load_dotenv
from scrapegraphai.graphs import XmlScraperGraph
from scrapegraphai.graphs import XMLScraperGraph
from scrapegraphai.utils import convert_to_csv, convert_to_json, prettify_exec_info
load_dotenv()

@@ -39,23 +39,23 @@
}

# ************************************************
-# Create the XmlScraperGraph instance and run it
+# Create the XMLScraperGraph instance and run it
# ************************************************

-smart_scraper_graph = XmlScraperGraph(
+xml_scraper_graph = XMLScraperGraph(
    prompt="List me all the authors, title and genres of the books",
    source=text,  # Pass the content of the file, not the file object
    config=graph_config
)

-result = smart_scraper_graph.run()
+result = xml_scraper_graph.run()
print(result)

# ************************************************
# Get graph execution info
# ************************************************

-graph_exec_info = smart_scraper_graph.get_execution_info()
+graph_exec_info = xml_scraper_graph.get_execution_info()
print(prettify_exec_info(graph_exec_info))

# Save to json or csv
1 change: 1 addition & 0 deletions examples/local_models/result.json
@@ -0,0 +1 @@
{"projects": [{"title": "Rotary Pendulum RL", "description": "Open Source project aimed at controlling a real life rotary pendulum using RL algorithms"}, {"title": "DQN Implementation from scratch", "description": "Developed a Deep Q-Network algorithm to train a simple and double pendulum"}, {"title": "Multi Agents HAED", "description": "University project which focuses on simulating a multi-agent system to perform environment mapping. Agents, equipped with sensors, explore and record their surroundings, considering uncertainties in their readings."}, {"title": "Wireless ESC for Modular Drones", "description": "Modular drone architecture proposal and proof of concept. The project received maximum grade."}]}
12 changes: 6 additions & 6 deletions examples/openai/json_scraper_openai.py
@@ -1,10 +1,10 @@
"""
Basic example of scraping pipeline using JsonScraperGraph from JSON documents
Basic example of scraping pipeline using JSONScraperGraph from JSON documents
"""

import os
from dotenv import load_dotenv
from scrapegraphai.graphs import JsonScraperGraph
from scrapegraphai.graphs import JSONScraperGraph
from scrapegraphai.utils import convert_to_csv, convert_to_json, prettify_exec_info
load_dotenv()

@@ -33,23 +33,23 @@
}

# ************************************************
-# Create the XmlScraperGraph instance and run it
+# Create the JSONScraperGraph instance and run it
# ************************************************

-smart_scraper_graph = JsonScraperGraph(
+json_scraper_graph = JSONScraperGraph(
    prompt="List me all the authors, title and genres of the books",
    source=text,  # Pass the content of the file, not the file object
    config=graph_config
)

-result = smart_scraper_graph.run()
+result = json_scraper_graph.run()
print(result)

# ************************************************
# Get graph execution info
# ************************************************

-graph_exec_info = smart_scraper_graph.get_execution_info()
+graph_exec_info = json_scraper_graph.get_execution_info()
print(prettify_exec_info(graph_exec_info))

# Save to json or csv
1 change: 1 addition & 0 deletions examples/openai/result.json
@@ -0,0 +1 @@
{"top_5_eyeliner_products_for_gift": [{"product_name": "Tarte Double Take Eyeliner", "type": "Liquid, Gel", "price": "$26", "link": "https://www.sephora.com/product/double-take-eyeliner-P421701"}, {"product_name": "AppleDoll Velvet Liner", "type": "Liquid", "price": "$22", "link": "https://www.appledoll.com/products/velvet-liner"}, {"product_name": "Rare Beauty Perfect Strokes Gel Eyeliner", "type": "Gel", "price": "$19", "link": "https://www.sephora.com/product/perfect-strokes-gel-eyeliner-P468000"}, {"product_name": "Laura Mercier Caviar Tightline Eyeliner", "type": "Gel", "price": "$29", "link": "https://www.sephora.com/product/caviar-tightline-eyeliner-P448800"}, {"product_name": "Ilia Clean Line Liquid Eyeliner", "type": "Liquid", "price": "$28", "link": "https://www.amazon.com/ILIA-Clean-Line-Liquid-Eyeliner/dp/B08Z7JZQZP"}, {"brand": "Tom Ford", "product_name": "Eye Defining Pen", "price": "$62", "type": "Liquid", "colors": 1, "retailer": "Nordstrom"}, {"brand": "Fenty Beauty", "product_name": "Flyliner", "price": "$24", "type": "Liquid", "colors": 2, "retailer": "Sephora"}, {"brand": "Lanc\u00f4me", "product_name": "Le Crayon Kh\u00f4l Smoky Eyeliner", "price": "$28", "type": "Kohl", "colors": 2, "retailer": "Macy's"}, {"brand": "Jillian Dempsey", "product_name": "Kh\u00f4l Eyeliner", "price": "$20", "type": "Kohl", "colors": 6, "retailer": "Amazon"}, {"brand": "R\u00f3en", "product_name": "Eyeline Define Eyeliner Pencil", "price": "$26", "type": "Kohl", "colors": 4, "retailer": "Credo Beauty"}]}
12 changes: 6 additions & 6 deletions examples/openai/xml_scraper_openai.py
@@ -1,10 +1,10 @@
"""
Basic example of scraping pipeline using XmlScraperGraph from XML documents
Basic example of scraping pipeline using XMLScraperGraph from XML documents
"""

import os
from dotenv import load_dotenv
from scrapegraphai.graphs import XmlScraperGraph
from scrapegraphai.graphs import XMLScraperGraph
from scrapegraphai.utils import convert_to_csv, convert_to_json, prettify_exec_info
load_dotenv()

@@ -33,23 +33,23 @@
}

# ************************************************
-# Create the XmlScraperGraph instance and run it
+# Create the XMLScraperGraph instance and run it
# ************************************************

-smart_scraper_graph = XmlScraperGraph(
+xml_scraper_graph = XMLScraperGraph(
    prompt="List me all the authors, title and genres of the books",
    source=text,  # Pass the content of the file, not the file object
    config=graph_config
)

-result = smart_scraper_graph.run()
+result = xml_scraper_graph.run()
print(result)

# ************************************************
# Get graph execution info
# ************************************************

-graph_exec_info = smart_scraper_graph.get_execution_info()
+graph_exec_info = xml_scraper_graph.get_execution_info()
print(prettify_exec_info(graph_exec_info))

# Save to json or csv
4 changes: 2 additions & 2 deletions scrapegraphai/graphs/__init__.py
@@ -6,5 +6,5 @@
from .speech_graph import SpeechGraph
from .search_graph import SearchGraph
from .script_creator_graph import ScriptCreatorGraph
-from .xml_scraper_graph import XmlScraperGraph
-from .json_scraper_graph import JsonScraperGraph
+from .xml_scraper_graph import XMLScraperGraph
+from .json_scraper_graph import JSONScraperGraph
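Since the rename changes the package's public import surface, downstream code has to update the casing of these class names. A minimal usage sketch under assumptions: the inline JSON string and the Ollama-style graph_config below are illustrative placeholders, whereas the bundled examples read the source from a file and build the config from environment variables.

from scrapegraphai.graphs import JSONScraperGraph  # formerly JsonScraperGraph; XmlScraperGraph -> XMLScraperGraph likewise

# Illustrative inline source and config (placeholders, not taken from this commit)
text = '{"books": [{"title": "Example Book", "author": "A. Author", "genre": "Fiction"}]}'
graph_config = {
    "llm": {
        "model": "ollama/mistral",  # assumed local-model config shape, as in the Ollama examples
    },
}

json_scraper_graph = JSONScraperGraph(
    prompt="List me all the authors, title and genres of the books",
    source=text,  # pass the content of the file, not the file object
    config=graph_config,
)

result = json_scraper_graph.run()
print(result)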
2 changes: 1 addition & 1 deletion scrapegraphai/graphs/json_scraper_graph.py
@@ -11,7 +11,7 @@
from .abstract_graph import AbstractGraph


-class JsonScraperGraph(AbstractGraph):
+class JSONScraperGraph(AbstractGraph):
    """
    SmartScraper is a comprehensive web scraping tool that automates the process of extracting
    information from web pages using a natural language model to interpret and answer prompts.
2 changes: 1 addition & 1 deletion scrapegraphai/graphs/xml_scraper_graph.py
@@ -11,7 +11,7 @@
from .abstract_graph import AbstractGraph


-class XmlScraperGraph(AbstractGraph):
+class XMLScraperGraph(AbstractGraph):
    """
    SmartScraper is a comprehensive web scraping tool that automates the process of extracting
    information from web pages using a natural language model to interpret and answer prompts.
