firefly007 |
7th May 2021 10:56 |
Twitter auto upload bot
First a quick test script to grab something to upload to twitter
Obviously this can be anything you want.
PHP Code:
<?php
// Database connection settings for the local news2 schema.
$servername = "localhost";
$username = "root";
$password = "";
$dbname = "news2";

// Create connection.
// NOTE: in the pasted original the next two statements sat on the same
// line as a // comment and were therefore commented out entirely.
$conn = new mysqli($servername, $username, $password, $dbname);

// Check connection
if ($conn->connect_error) {
    die("Connection failed: " . $conn->connect_error);
}
//echo "Connected successfully";
function sanitize($input){ global $conn; $input = htmlentities($input); // convert symbols to html entities $input = addslashes($input); // server doesn't add slashes, so we will add them to escape ',",\,NULL $input = mysqli_real_escape_string($conn,$input); // escapes \x00, \n, \r, \, ', " and \x1a return $input; }
// Present a desktop browser User-Agent so the feed host serves us normally.
$context = stream_context_create(array(
    "http" => array(
        "header" => "User-Agent: Mozilla/5.0 (Windows NT 10.0; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/50.0.2661.102 Safari/537.36"
    )
));

//header('Content-Type: application/json');
// Fetch the BBC world-news RSS feed.  NOTE: in the pasted original this
// fetch sat on the same line as the // comment above and never ran.
$data = file_get_contents("http://feeds.bbci.co.uk/news/world/rss.xml", false, $context);
// Expose the feed both as SimpleXML and, via a JSON round trip, as a
// plain PHP array for convenient numeric indexing.
$xml = new SimpleXMLElement($data);
$json = json_encode($xml);
$array = json_decode($json, TRUE);

$xml2 = simplexml_load_string($data);
print($xml2->channel->item->title);

// Newest item: title, link, and an md5 of the publication date which acts
// as this item's de-duplication key ($GUID1).
$NAME = sanitize($xml2->channel->item->title);
$LINK = sanitize($array["channel"]["item"][0]["link"]);
$GUID = sanitize($array["channel"]["item"][0]["pubDate"]);
$GUID1 = md5($array["channel"]["item"][0]["pubDate"]);
$ADDED = sanitize($array["channel"]["item"][0]["pubDate"]);
// Insert the newest feed item unless it already is the most recent row.
// NOTE: in the pasted original the while loop below sat inside a //
// comment and never ran.
$sql = "SELECT guid FROM feed ORDER BY id DESC LIMIT 1";
$result = mysqli_query($conn, $sql);
if (mysqli_num_rows($result) > 0) {//check0
    // output data of each row (LIMIT 1 means at most one iteration)
    while ($row = mysqli_fetch_assoc($result)) {//check1
        if ($GUID1 == $row["guid"]) {//check02
            // Typo fix: "aready" -> "already".
            exit($NAME . " is already in the db");
        } else {//check02
            // Prepared statement instead of string-concatenated SQL:
            // the feed content is untrusted input.
            $stmt = $conn->prepare(
                "INSERT INTO feed (name, link, guid, added) VALUES (?, ?, ?, ?)"
            );
            $stmt->bind_param("ssss", $NAME, $LINK, $GUID1, $ADDED);
            if ($stmt->execute()) {//check3
                exit($NAME . " New record created successfully");
            } else {//check3
                echo "Error: " . $stmt->error;
            }//check3
        }//check02
    }//check1
} else {//check0
    echo "0 results";
}//check0
Then the bot which uploads the scraped info into twitter
You will need to install selenium, tqdm, and mariadb using pip3, as well as the geckodriver web driver for Firefox.
Code:
# Module Imports
import mariadb
import sys
import os
from datetime import datetime
from time import sleep
from tqdm import tqdm
# Twitter credentials.  The original assigned the bare names ``pasw`` and
# ``user``, which are undefined and raise NameError at import time; read
# them from the environment instead, keeping the original placeholders as
# defaults so the script still starts without configuration.
TWIT_PASS = os.environ.get("TWIT_PASS", "pasw")
TWIT_USER = os.environ.get("TWIT_USER", "user")
def prog(SL, TX):
    """Show a 4-step tqdm progress bar, sleeping ``SL`` seconds per step
    (total delay ``4 * SL``), then print the message ``TX``.

    Used purely as a visual pause between bot actions.
    """
    for _ in tqdm(range(4)):
        sleep(SL)
    # The original attached these prints to a redundant ``for/else``;
    # a loop with no ``break`` always runs its ``else``, so plain
    # statements after the loop are equivalent and clearer.
    print("\n")
    print(TX)
# Open the MariaDB connection used by the rest of the script; abort the
# whole run if the database is unreachable.
try:
    conn = mariadb.connect(
        user="root",
        password="",
        host="localhost",
        port=3306,
        database="news2",
    )
except mariadb.Error as e:
    print(f"Error connecting to MariaDB Platform: {e}")
    sys.exit(1)

# Module-level cursor shared by sel() and the main driver below.
cur = conn.cursor()
def sel(FUNC_LINK, FUNC_GUID):
    """Log in to Twitter with headless Firefox, tweet ``FUNC_LINK``,
    record ``FUNC_GUID`` in the ``checkdupe`` table, then log out and
    tear the browser down.

    Relies on the module-level ``TWIT_USER``/``TWIT_PASS`` credentials
    and the module-level ``cur``/``conn`` MariaDB handles.

    NOTE(review): the ``find_element_by_*`` helpers were removed in
    Selenium 4.3 — confirm the installed selenium version still ships
    them.  The hard-coded XPaths track one specific Twitter page layout
    and will break whenever the markup changes.
    """
    from selenium.webdriver.firefox.options import Options as FirefoxOptions
    from selenium import webdriver
    from selenium.webdriver.support.ui import WebDriverWait
    # Run Firefox without a visible window.
    options = FirefoxOptions()
    options.add_argument("--headless")
    sleep(3)
    driver = webdriver.Firefox(options=options)
    # --- Login -----------------------------------------------------------
    driver.get("https://twitter.com/login")
    sleep(5)
    driver.find_element_by_name("session[username_or_email]").send_keys(TWIT_USER)
    sleep(4)
    driver.find_element_by_name("session[password]").send_keys(TWIT_PASS)
    sleep(4)
    # Click the "Log in" button.
    driver.find_element_by_xpath(
        '//*[@id="react-root"]/div/div/div[2]/main/div/div/div[2]/form/div/div[3]/div/div/span/span'
    ).click()
    sleep(3)
    # --- Compose and send the tweet --------------------------------------
    driver.get("https://twitter.com/compose/tweet")
    sleep(5)
    # Type the article link into the tweet text box.
    driver.find_element_by_xpath(
        '//*[@id="layers"]/div[2]/div/div/div/div/div/div[2]/div[2]/div/div[3]/div/div/div/div[1]/div/div/div/div/div[2]/div[1]/div/div/div/div/div/div/div/div/div/div[1]/div/div/div/div[2]/div'
    ).send_keys(FUNC_LINK)
    # driver.find_element_by_xpath('//*[@id="layers"]/div[2]/div/div/div/div/div/div[2]/div[2]/div/div[3]/div/div/div/div[1]/div/div/div/div/div[2]/div[1]/div/div/div/div/div/div/div/div/div/div[1]/div/div/div/div[2]/div').send_keys(FUNC_LINK)
    sleep(3)
    # Click the "Tweet" button.
    driver.find_element_by_xpath(
        '//*[@id="layers"]/div[2]/div/div/div/div/div/div[2]/div[2]/div/div[3]/div/div/div/div[1]/div/div/div/div/div[2]/div[4]/div/div/div[2]/div/div/span/span'
    ).click()
    sleep(3)
    # self.driver = webdriver.Firefox(firefox_profile=profile, log_path='./home/firefly/Documents/geckodriver.log')
    # --- Logout ----------------------------------------------------------
    driver.get("https://twitter.com/logout")
    sleep(3)
    # Confirm the logout dialog.
    driver.find_element_by_xpath(
        '//*[@id="layers"]/div[2]/div/div/div/div/div/div[2]/div[2]/div[3]/div[2]/div/span/span'
    ).click()
    sleep(3)
    # Remember this item so the dedupe check in the main driver skips it
    # on the next run.
    cur.execute(
        "INSERT INTO checkdupe (guid,added) VALUES (?, ?)", (FUNC_GUID, datetime.now())
    )
    conn.commit()
    prog(3, "Logging out from Twitter")
    # Belt-and-braces teardown.  NOTE(review): ``pkill -f firefox`` kills
    # every Firefox process on the machine, not just this driver's.
    driver.stop_client()
    driver.close()
    os.system("pkill -f firefox")
    driver.quit()
# --- Main driver -----------------------------------------------------------
# Compare the newest guid in ``feed`` with the newest guid in ``checkdupe``;
# if they differ the latest scraped item has not been tweeted yet, so post it.
cur.execute("SELECT guid FROM feed ORDER BY id DESC LIMIT 1")
feed_row = cur.fetchone()
# fetchone() returns None on an empty table; the original iterated the
# result unconditionally and crashed in that case.
latest_feed_guid = feed_row[0] if feed_row else None

cur.execute("SELECT guid FROM checkdupe ORDER BY id DESC LIMIT 1")
dupe_row = cur.fetchone()
latest_posted_guid = dupe_row[0] if dupe_row else None

if latest_feed_guid != latest_posted_guid:
    cur.execute("SELECT id,name,link,guid FROM feed ORDER BY id DESC LIMIT 1")
    # At most one row thanks to LIMIT 1.  ``row_id`` avoids shadowing the
    # builtin ``id`` that the original loop variable clobbered.
    for (row_id, name, link, guid) in cur:
        # FIX: the original string lacked the ``f`` prefix and printed the
        # literal text "{VAR_NAME}".
        prog(3, f"Posting {name} to twitter.")
        sel(link, guid)
else:
    print("Dupe entry: Halt!")
|