main
oscar 11 months ago
parent ee810dd696
commit eafc36e805

@ -1,64 +1,6 @@
from BunnyCDN.Storage import Storage
from datetime import datetime
import os, config, funcs
from PIL import Image
import time
def UploadMedia(filepath, username, media_id, timestamp = None, user_id = None):
    """Upload one local media file to BunnyCDN storage and record it in `media`.

    Relies on module-level globals: `existing_files` (known media ids),
    `obj_storage` (BunnyCDN Storage client), `newCursor`/`newDB` (DB cursor
    and connection), and the `funcs` helper module.

    Returns True whether the file was uploaded or skipped as a duplicate;
    the local file is deleted in both cases.
    """
    # Fix: check for duplicates FIRST, before any expensive hashing,
    # ffprobe calls, or image decoding on a file we are about to discard.
    if media_id and int(media_id) in existing_files:
        print('Duplicate file detected. Removing...')
        os.remove(filepath)
        return True

    filename = os.path.basename(filepath)
    file_extension = os.path.splitext(filename)[1].lower()

    media_type = funcs.get_media_type(filename)
    post_type = funcs.determine_post_type(filepath, media_type)
    file_hash = funcs.calculate_file_hash(filepath)
    duration = funcs.get_video_duration(filepath) if media_type == 'video' else 0
    post_date = datetime.fromtimestamp(int(timestamp)) if timestamp else datetime.now()

    # Videos are probed by helper; images get their size straight from PIL.
    width, height = funcs.get_video_dimensions(filepath) if media_type == 'video' else Image.open(filepath).size

    server_path = f'media/{post_type}/{username}/{media_id}{file_extension}'
    file_url = f"https://storysave.b-cdn.net/{server_path}"

    existing_files.append(int(media_id))

    obj_storage.PutFile(filepath, server_path)

    query = "INSERT IGNORE INTO media (username, media_type, media_url, width, height, media_id, post_type, date, user_id, hash, filename, duration) VALUES (%s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s)"
    values = (username, media_type, file_url, width, height, media_id, post_type, post_date, user_id, file_hash, filename, duration)
    newCursor.execute(query, values)
    newDB.commit()

    print(f'[{newCursor.rowcount}] records updated. File (unknown) uploaded to {file_url}')

    os.remove(filepath)
    return True
def dump_instagram(folder_path):
    """Upload every storysaver-named file found directly inside `folder_path`.

    Expected filename layout: "<username>~<timestamp>~<media_id>~..._<user_id>.<ext>";
    entries with fewer than four '~'-separated fields are silently skipped.
    """
    for entry in os.listdir(folder_path):
        fields = entry.split('~')
        if len(fields) < 4:
            continue

        owner, taken_at, media_pk = fields[0], fields[1], fields[2]
        # user_id is the trailing token of the 4th field, minus the extension.
        uploader_id = fields[3].split('_')[-1].split('.')[0]

        UploadMedia(
            username=owner,
            filepath=os.path.join(folder_path, entry),
            media_id=media_pk,
            timestamp=taken_at,
            user_id=uploader_id,
        )
import dump_instagram as storysaver
import time, config
if __name__ == '__main__':
print('Starting processing...')
@ -72,6 +14,6 @@ if __name__ == '__main__':
while True:
print("Processing...")
dump_instagram('storysaver/')
storysaver.dump_instagram('storysaver/')
print("Processing completed.")
time.sleep(15)

@ -5,6 +5,11 @@ from PIL import Image
def UploadMedia(filepath, username, media_id, timestamp = None, user_id = None):
if media_id and int(media_id) in existing_files:
print('Duplicate file detected. Removing...')
os.remove(filepath)
return True
filename = os.path.basename(filepath)
file_extension = os.path.splitext(filename)[1].lower()
@ -24,10 +29,21 @@ def UploadMedia(filepath, username, media_id, timestamp = None, user_id = None):
file_url = f"https://storysave.b-cdn.net/{server_path}"
if media_id and int(media_id) in existing_files:
print('Duplicate file detected. Removing...')
os.remove(filepath)
return True
if user_id and 'highlight' in user_id:
highlight_id = user_id.replace('highlight', '')
user_id = None
try:
newCursor.execute("SELECT user_id FROM media WHERE username=%s", (username,))
user_id = newCursor.fetchall()[0][0]
except:
print(f'User {username} not found in database. Skipping...')
user_id = None
newCursor.execute("INSERT IGNORE INTO highlights (highlight_id, user_id, media_id) VALUES (%s, %s, %s)", (highlight_id, user_id, media_id))
newDB.commit()
print(f'[{newCursor.rowcount}] added highlight {highlight_id} to user {user_id}')
obj_storage.PutFile(filepath, server_path)
@ -48,7 +64,7 @@ def dump_instagram(folder_path):
parts = filename.split('~')
if len(parts) < 4:
continue
username = parts[0]
timestamp = parts[1]
media_id = parts[2]

@ -0,0 +1,2 @@
https://www.instagram.com/anya_shtril/
https://www.instagram.com/anyarodionov/

@ -0,0 +1,37 @@
{
"uuids": {
"phone_id": "6dc6b4c0-375c-416f-8e62-3ff2d417e4f5",
"uuid": "224b114f-b02e-4473-80aa-f59d62199a11",
"client_session_id": "60907f75-8352-4d76-b7e6-1a90411cb758",
"advertising_id": "1bac4b76-44b4-49a6-af0e-d24eee9ca975",
"android_device_id": "android-d6477804f91acf95",
"request_id": "90e58451-9e82-4fe7-987b-b1563aa3282c",
"tray_session_id": "011dccd2-040e-4471-915f-d4fcfa1e7907"
},
"mid": "ZnCZjQABAAGMSIaYXEanvavj6xms",
"ig_u_rur": null,
"ig_www_claim": null,
"authorization_data": {
"ds_user_id": "1587432849",
"sessionid": "1587432849%3AXXakd6sE4Iw3kR%3A2%3AAYcSu5MUADRQSpPi2YW89hUL5om1TD01EOZqxq5qHA"
},
"cookies": {},
"last_login": 1718658980.6259732,
"device_settings": {
"app_version": "269.0.0.18.75",
"android_version": 26,
"android_release": "8.0.0",
"dpi": "480dpi",
"resolution": "1080x1920",
"manufacturer": "OnePlus",
"device": "devitron",
"model": "6T Dev",
"cpu": "qcom",
"version_code": "314665256"
},
"user_agent": "Instagram 269.0.0.18.75 Android (26/8.0.0; 480dpi; 1080x1920; OnePlus; 6T Dev; devitron; qcom; en_US; 314665256)",
"country": "US",
"country_code": 1,
"locale": "en_US",
"timezone_offset": -14400
}

@ -0,0 +1,293 @@
import cv2, os, json, config, hashlib, requests
from concurrent.futures import ThreadPoolExecutor
from moviepy.editor import VideoFileClip
from cryptography.fernet import Fernet
from BunnyCDN.Storage import Storage
from instagrapi import Client
from PIL import Image
headers = {"User-Agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/111.0.0.0 Safari/537.36"}
proxies={"http": "http://yehyuxsl-rotate:4tl5bvrwkz5e@p.webshare.io:80/","https": "http://yehyuxsl-rotate:4tl5bvrwkz5e@p.webshare.io:80/"}
def file_hash(filename):
    """Return the SHA-256 hex digest of the file at `filename`, read in 8 KiB chunks."""
    digest = hashlib.sha256()
    with open(filename, "rb") as fh:
        # iter() with a sentinel keeps memory flat for arbitrarily large files.
        for chunk in iter(lambda: fh.read(8192), b""):
            digest.update(chunk)
    return digest.hexdigest()
def get_video_duration(file_path):
    """Return the duration in seconds of the video at `file_path`, or 0 on any failure.

    Corrupt or unreadable files are common in this pipeline, so failures are
    treated as zero-length rather than raised.
    """
    try:
        with VideoFileClip(file_path) as video:
            return video.duration
    except Exception:
        # Fix: bare `except:` also swallowed KeyboardInterrupt/SystemExit.
        return 0
def login():
    """Return an authenticated instagrapi Client.

    Reuses a cached session from session_data.json when present; otherwise
    decrypts the stored password (p.enc + key.enc via Fernet), logs in
    (prompting for an optional 2FA code), and caches the new session.
    """
    client = Client()

    if os.path.exists("session_data.json"):
        client.load_settings("session_data.json")
        return client

    with open("p.enc", "rb") as encrypted_file:
        encrypted_data = encrypted_file.read()

    # Fix: the key file was opened without ever being closed; use a
    # context manager so the handle is released deterministically.
    with open("key.enc", "r") as key_file:
        fernet = Fernet(key_file.read())
    password = str(fernet.decrypt(encrypted_data), "utf-8")

    username = "olivercury"
    auth = input("Enter your 2FA code (leave blank if not enabled): ")
    client.login(username=username, password=password, verification_code=auth)
    client.dump_settings("session_data.json")
    print("Logged in successfully.")
    return client
def parse_media_data(media_item):
    """Flatten an instagrapi media object into the plain dict the pipeline uses."""
    type_names = {1: 'image', 2: 'video', 8: 'album'}
    is_image = media_item.media_type == 1
    extension = "jpg" if is_image else "mp4"

    return {
        'taken_at': getattr(media_item, 'taken_at', None),
        'post_type': media_item.product_type,
        'media_type': type_names[media_item.media_type],
        'user_id': media_item.user.pk,
        'media_id': int(media_item.pk),
        'filename': f"{media_item.pk}.{extension}",
        # Images only expose a thumbnail URL; everything else carries video_url.
        'media_url': media_item.thumbnail_url if is_image else media_item.video_url,
    }
def download_file(url, filePath):
    """Stream `url` to `filePath`, creating parent directories as needed.

    Best-effort: failures are logged and swallowed so one bad URL never
    aborts the caller's loop.
    """
    try:
        response = requests.get(url, stream=True, headers=headers)
        response.raise_for_status()

        directory = os.path.dirname(filePath)
        # Fix: exist_ok avoids the check-then-create race when several
        # downloads share one directory; skip for bare filenames (dirname "").
        if directory:
            os.makedirs(directory, exist_ok=True)

        with open(filePath, "wb") as out_file:
            for chunk in response.iter_content(chunk_size=8192):
                out_file.write(chunk)
        print(f"Downloaded {filePath}")
    except Exception as e:
        print(f"Failed to download {url}. Error: {e}")
def upload_to_storage(local_path, server_path):
    """Push `local_path` to the BunnyCDN 'storysave' zone at `server_path` (best-effort)."""
    # NOTE(review): storage API key is hard-coded here — consider moving it
    # into config alongside the DB credentials.
    try:
        cdn = Storage("345697f9-d9aa-4a6b-a5ec8bffc16d-ceaf-453e", "storysave")
        cdn.PutFile(local_path, server_path)
        print(f"Uploaded to https://storysave.b-cdn.net/{server_path}")
    except Exception as e:
        print(f"Failed to upload {local_path} to {server_path}. Error: {e}")
def add_media_to_db(mediaInfo):
    """Insert one media record into the `media` table.

    Expects mediaInfo to carry: media_id, user_id, username, media_type,
    post_type, media_url, width, height, hash. `taken_at` and `duration`
    are optional. Errors are logged, never raised.
    """
    media_id = mediaInfo["media_id"]
    user_id = mediaInfo["user_id"]
    username = mediaInfo["username"]
    # Idiomatic .get — consistent with the `duration` lookup below.
    date = mediaInfo.get("taken_at")
    media_type = mediaInfo["media_type"]
    post_type = mediaInfo["post_type"]
    duration = mediaInfo.get("duration", 0)
    media_url = mediaInfo["media_url"]
    width = mediaInfo["width"]
    height = mediaInfo["height"]
    filehash = mediaInfo["hash"]

    try:
        db, cursor = config.gen_connection()
        query = """
        INSERT INTO media (user_id, username, date, media_type, post_type, media_url, duration, width, height, media_id, hash)
        VALUES (%s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s)
        """
        data = (user_id, username, date, media_type, post_type, media_url, duration, width, height, media_id, filehash)
        cursor.execute(query, data)
        db.commit()
        print(f"Added media for {username} to the database.")
    except Exception as e:
        print(f"Failed to add media for {username} to the database. Error: {e}")
def insert_highlight_items(media_ids, highlight_id, title, user_id):
    """Bulk-insert highlight membership rows; duplicates are ignored by the DB."""
    try:
        db, cursor = config.gen_connection()
        sql = "INSERT IGNORE INTO highlights (media_id, highlight_id, title, user_id) VALUES (%s, %s, %s, %s)"
        rows = [(mid, highlight_id, title, user_id) for mid in media_ids]
        cursor.executemany(sql, rows)
        db.commit()
        # rowcount is 0 when every row already existed — stay quiet then.
        if cursor.rowcount > 0:
            print(f"Added {cursor.rowcount} highlight items to the database.")
    except Exception as e:
        print(f"Failed to add highlight items to the database. Error: {e}")
def get_video_dimensions(video_path):
    """Return (width, height) of `video_path` as reported by OpenCV."""
    capture = cv2.VideoCapture(video_path)
    frame_w = int(capture.get(cv2.CAP_PROP_FRAME_WIDTH))
    frame_h = int(capture.get(cv2.CAP_PROP_FRAME_HEIGHT))
    capture.release()
    return frame_w, frame_h
def getAllStories(client, user_id, firstImport=False):
    """Return all live stories plus every highlight item for `user_id`.

    Returns raw instagrapi media objects — callers run parse_media_data
    themselves. `firstImport` is accepted for interface compatibility but
    is currently unused.
    """
    stories = client.user_stories(user_id)

    for highlight in client.user_highlights(user_id):
        stories.extend(client.highlight_info_v1(highlight.pk).items)

    # Fix: removed a dead pre-parsing loop whose result was never returned.
    return stories
def getAllPosts(client, user_id):
    """Return the user's 36 most recent posts as raw instagrapi objects.

    NOTE(review): album posts (media_type 8) are expanded into `medias`,
    yet the function returns `posts`, so callers never see the expanded
    album children. Kept as-is to preserve existing behavior — confirm
    whether `medias` was the intended return value.
    """
    posts = client.user_medias(user_id, 36)

    medias = []
    for post in posts:
        if post.media_type == 8:  # album: collect each child resource
            medias.extend(post.resources)
            continue
        medias.append(post)

    # Fix: removed a dead pre-parsing loop whose result was never returned.
    return posts
if __name__ == "__main__":
    client = login()

    db, cursor = config.gen_connection()

    cursor.execute("SELECT instagram_username, instagram_user_id, favorite FROM following ORDER BY id DESC;")
    following = cursor.fetchall()

    # Stable-partition: favorites move to the front so they are scraped first.
    new_following = []
    for user in following:
        username, user_id, favorite = user
        if bool(favorite):
            new_following.insert(0, user)
        else:
            new_following.append(user)
    following = new_following

    cursor.execute("SELECT media_id FROM media WHERE media_id IS NOT NULL;")
    existing_files = [media[0] for media in cursor.fetchall()]

    continueFromLast = input("Continue from the last user? (y/N): ").lower() == "y"
    if continueFromLast:
        cursor.execute("SELECT username FROM media ORDER BY id DESC LIMIT 1;")
        lastUser = cursor.fetchone()
        if lastUser:
            lastUser = lastUser[0]
            for idx, user in enumerate(following):
                if user[0] == lastUser:
                    following = following[idx:]
                    break

    # (removed: a long commented-out profile-picture download block)
    for user in following:
        # Retry loop: re-login on an expired session, otherwise one pass per user.
        while True:
            try:
                firstImport = False
                username, user_id, isFavorite = user

                if not user_id:
                    firstImport = True
                    user_id = client.user_id_from_username(username)
                    cursor.execute("UPDATE following SET instagram_user_id = %s WHERE instagram_username = %s;", (user_id, username))
                    db.commit()
                    print(f"Updated user ID for {username} to {user_id}")

                allStories = getAllStories(client, user_id, firstImport)
                allPosts = getAllPosts(client, user_id)

                medias = allStories + allPosts
                for media in medias:
                    mediaInfo = parse_media_data(media)

                    # Fix: skip media we already hold — existing_files was
                    # built and appended to but never consulted.
                    if mediaInfo["media_id"] in existing_files:
                        continue

                    # Fix: parse_media_data does not include the username, but
                    # add_media_to_db reads mediaInfo["username"] — set it here.
                    mediaInfo["username"] = username

                    mediaType = "stories" if mediaInfo["post_type"] == "story" else "posts"
                    filePath = os.path.join('media', mediaType, username, mediaInfo['filename'])

                    # Fix: download BEFORE hashing — the old order hashed a
                    # file that did not exist yet and aborted the user loop.
                    download_file(mediaInfo['media_url'], filePath)
                    mediaInfo["hash"] = file_hash(filePath)

                    if mediaInfo["media_type"] == "image":
                        with Image.open(filePath) as img:
                            mediaInfo["width"], mediaInfo["height"] = img.size
                    else:
                        mediaInfo["width"], mediaInfo["height"] = get_video_dimensions(filePath)
                        mediaInfo["duration"] = get_video_duration(filePath)

                    upload_to_storage(filePath, filePath)
                    add_media_to_db(mediaInfo)

                    os.remove(filePath)
                    existing_files.append(mediaInfo["media_id"])

                print("=====================================")
                break
            except Exception as e:
                if "login_required" in str(e):
                    print("Please log in to your account again.")
                    os.remove("session_data.json")
                    client = login()
                else:
                    print("An unexpected error occurred:", e)
                    break

File diff suppressed because it is too large Load Diff

@ -0,0 +1,24 @@
import json

filePath = 'test.json'

with open(filePath, 'r', encoding='utf-8') as f:
    data = json.load(f)

print(data)

# Unwrap the GraphQL timeline envelope down to the list of post nodes.
edges = data['data']['xdt_api__v1__feed__user_timeline_graphql_connection']['edges']
posts = [edge['node'] for edge in edges]

for post in posts:
    best_area = 0
    for candidate in post['image_versions2']['candidates']:
        area = candidate['width'] * candidate['height']
        if area > best_area:
            best_area = area
            # NOTE(review): this tracks the POST, not the winning candidate,
            # and is reassigned every iteration of the outer loop — confirm
            # the intended semantics before relying on goodPost.
            goodPost = post

print(goodPost)
Loading…
Cancel
Save